From b4a729ed027621a062c4a1ad9c50d81e6fdd8758 Mon Sep 17 00:00:00 2001 From: stack Date: Thu, 29 Sep 2016 14:57:45 -0700 Subject: [PATCH] HBASE-16567 Upgrade to protobuf-3.1.x Regenerate all protos in this module with protoc3. Redo ByteStringer to use new pb3.1.0 unsafebytesutil instead of HBaseZeroCopyByteString --- hbase-protocol-shaded/pom.xml | 8 +- .../google/protobuf/HBaseZeroCopyByteString.java | 77 - .../protobuf/generated/TestProcedureProtos.java | 290 +- .../shaded/ipc/protobuf/generated/TestProtos.java | 1417 +- .../protobuf/generated/TestRpcServiceProtos.java | 27 +- .../shaded/protobuf/generated/AdminProtos.java | 9459 ++++--- .../shaded/protobuf/generated/CellProtos.java | 657 +- .../shaded/protobuf/generated/ClientProtos.java | 11869 ++++---- .../shaded/protobuf/generated/ClusterIdProtos.java | 322 +- .../protobuf/generated/ClusterStatusProtos.java | 4554 +-- .../protobuf/generated/ComparatorProtos.java | 2338 +- .../protobuf/generated/EncryptionProtos.java | 288 +- .../protobuf/generated/ErrorHandlingProtos.java | 908 +- .../hbase/shaded/protobuf/generated/FSProtos.java | 568 +- .../shaded/protobuf/generated/FilterProtos.java | 7403 ++--- .../shaded/protobuf/generated/HBaseProtos.java | 7025 ++--- .../shaded/protobuf/generated/HFileProtos.java | 602 +- .../protobuf/generated/LoadBalancerProtos.java | 249 +- .../shaded/protobuf/generated/MapReduceProtos.java | 575 +- .../protobuf/generated/MasterProcedureProtos.java | 6021 ++-- .../shaded/protobuf/generated/MasterProtos.java | 27544 +++++++++++-------- .../shaded/protobuf/generated/ProcedureProtos.java | 2629 +- .../shaded/protobuf/generated/QuotaProtos.java | 1597 +- .../hbase/shaded/protobuf/generated/RPCProtos.java | 2423 +- .../protobuf/generated/RegionNormalizerProtos.java | 249 +- .../generated/RegionServerStatusProtos.java | 3785 +-- .../shaded/protobuf/generated/SnapshotProtos.java | 1489 +- .../shaded/protobuf/generated/TracingProtos.java | 265 +- 
.../hbase/shaded/protobuf/generated/WALProtos.java | 3886 +-- .../shaded/protobuf/generated/ZooKeeperProtos.java | 3587 +-- .../hadoop/hbase/shaded/util/ByteStringer.java | 29 +- 31 files changed, 56460 insertions(+), 45680 deletions(-) delete mode 100644 hbase-protocol-shaded/src/main/java/com/google/protobuf/HBaseZeroCopyByteString.java diff --git a/hbase-protocol-shaded/pom.xml b/hbase-protocol-shaded/pom.xml index 46f5087..1bd0aa6 100644 --- a/hbase-protocol-shaded/pom.xml +++ b/hbase-protocol-shaded/pom.xml @@ -33,6 +33,9 @@ true + + 3.1.0 @@ -197,7 +200,7 @@ com.google.protobuf protobuf-java - 2.5.0 + ${internal.protobuf.version} commons-logging @@ -230,6 +233,9 @@ org.apache.hadoop hadoop-maven-plugins + + ${internal.protobuf.version} + compile-protoc diff --git a/hbase-protocol-shaded/src/main/java/com/google/protobuf/HBaseZeroCopyByteString.java b/hbase-protocol-shaded/src/main/java/com/google/protobuf/HBaseZeroCopyByteString.java deleted file mode 100644 index 8be8ff4..0000000 --- a/hbase-protocol-shaded/src/main/java/com/google/protobuf/HBaseZeroCopyByteString.java +++ /dev/null @@ -1,77 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.protobuf; // This is a lie. 
- -/** - * Helper class to extract byte arrays from {@link ByteString} without copy. - *

- * Without this protobufs would force us to copy every single byte array out - * of the objects de-serialized from the wire (which already do one copy, on - * top of the copies the JVM does to go from kernel buffer to C buffer and - * from C buffer to JVM buffer). - * - * @since 0.96.1 - */ -// This will go away when we go to pb3.0.0. Meantime there are two instances of this class... one -// in this module and another over in hbase-protocol. They are different in that one is shaded and -// the other is not so could be problematic while the two versions exist! -public final class HBaseZeroCopyByteString extends LiteralByteString { - // Gotten from AsyncHBase code base with permission. - /** Private constructor so this class cannot be instantiated. */ - private HBaseZeroCopyByteString() { - super(null); - throw new UnsupportedOperationException("Should never be here."); - } - - /** - * Wraps a byte array in a {@link ByteString} without copying it. - * @param array array to be wrapped - * @return wrapped array - */ - public static ByteString wrap(final byte[] array) { - return new LiteralByteString(array); - } - - /** - * Wraps a subset of a byte array in a {@link ByteString} without copying it. - * @param array array to be wrapped - * @param offset from - * @param length length - * @return wrapped array - */ - public static ByteString wrap(final byte[] array, int offset, int length) { - return new BoundedByteString(array, offset, length); - } - - // TODO: - // ZeroCopyLiteralByteString.wrap(this.buf, 0, this.count); - - /** - * Extracts the byte array from the given {@link ByteString} without copy. - * @param buf A buffer from which to extract the array. This buffer must be - * actually an instance of a {@code LiteralByteString}. 
- * @return byte[] representation - */ - public static byte[] zeroCopyGetBytes(final ByteString buf) { - if (buf instanceof LiteralByteString) { - return ((LiteralByteString) buf).bytes; - } - throw new UnsupportedOperationException("Need a LiteralByteString, got a " - + buf.getClass().getName()); - } -} \ No newline at end of file diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestProcedureProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestProcedureProtos.java index baf4e64..641bde3 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestProcedureProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestProcedureProtos.java @@ -6,12 +6,18 @@ package org.apache.hadoop.hbase.shaded.ipc.protobuf.generated; public final class TestProcedureProtos { private TestProcedureProtos() {} public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); } - public interface TestTableDDLStateDataOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface TestTableDDLStateDataOrBuilder extends + // @@protoc_insertion_point(interface_extends:TestTableDDLStateData) + com.google.protobuf.MessageOrBuilder { - // required string table_name = 1; /** * required string table_name = 1; */ @@ -29,36 +35,28 @@ public final class TestProcedureProtos { /** * Protobuf type {@code TestTableDDLStateData} */ - public static final class TestTableDDLStateData extends - com.google.protobuf.GeneratedMessage - implements TestTableDDLStateDataOrBuilder { + public static final class TestTableDDLStateData extends + com.google.protobuf.GeneratedMessageV3 
implements + // @@protoc_insertion_point(message_implements:TestTableDDLStateData) + TestTableDDLStateDataOrBuilder { // Use TestTableDDLStateData.newBuilder() to construct. - private TestTableDDLStateData(com.google.protobuf.GeneratedMessage.Builder builder) { + private TestTableDDLStateData(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private TestTableDDLStateData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final TestTableDDLStateData defaultInstance; - public static TestTableDDLStateData getDefaultInstance() { - return defaultInstance; } - - public TestTableDDLStateData getDefaultInstanceForType() { - return defaultInstance; + private TestTableDDLStateData() { + tableName_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private TestTableDDLStateData( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -78,8 +76,9 @@ public final class TestProcedureProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - tableName_ = input.readBytes(); + tableName_ = bs; break; } } @@ -88,7 +87,7 @@ public final class TestProcedureProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); 
makeExtensionsImmutable(); @@ -99,32 +98,16 @@ public final class TestProcedureProtos { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.class, org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public TestTableDDLStateData parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new TestTableDDLStateData(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string table_name = 1; public static final int TABLE_NAME_FIELD_NUMBER = 1; - private java.lang.Object tableName_; + private volatile java.lang.Object tableName_; /** * required string table_name = 1; */ @@ -165,13 +148,11 @@ public final class TestProcedureProtos { } } - private void initFields() { - tableName_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasTableName()) { memoizedIsInitialized = 0; @@ -183,33 +164,60 @@ public final class 
TestProcedureProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getTableNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, tableName_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getTableNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, tableName_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData other = (org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData) obj; + + boolean result = true; + result = result && (hasTableName() == other.hasTableName()); + if (hasTableName()) { + result = result && getTableName() + .equals(other.getTableName()); + } + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 
41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasTableName()) { + hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; + hash = (53 * hash) + getTableName().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom( @@ -235,46 +243,57 @@ public final class TestProcedureProtos { } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -282,14 +301,15 @@ public final class TestProcedureProtos { * Protobuf type {@code TestTableDDLStateData} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateDataOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:TestTableDDLStateData) + org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateDataOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -302,18 +322,15 @@ public final class TestProcedureProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public 
Builder clear() { super.clear(); tableName_ = ""; @@ -321,10 +338,6 @@ public final class TestProcedureProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_descriptor; @@ -355,6 +368,32 @@ public final class TestProcedureProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData) { return mergeFrom((org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData)other); @@ -371,13 +410,13 @@ public final class TestProcedureProtos { tableName_ = other.tableName_; onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasTableName()) { - return false; } return true; @@ -392,7 +431,7 @@ 
public final class TestProcedureProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -402,7 +441,6 @@ public final class TestProcedureProtos { } private int bitField0_; - // required string table_name = 1; private java.lang.Object tableName_ = ""; /** * required string table_name = 1; @@ -416,9 +454,12 @@ public final class TestProcedureProtos { public java.lang.String getTableName() { java.lang.Object ref = tableName_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - tableName_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + tableName_ = s; + } return s; } else { return (java.lang.String) ref; @@ -475,29 +516,66 @@ public final class TestProcedureProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:TestTableDDLStateData) } + // @@protoc_insertion_point(class_scope:TestTableDDLStateData) + private static final org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData DEFAULT_INSTANCE; static { - defaultInstance = new TestTableDDLStateData(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new 
org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData(); + } + + public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public TestTableDDLStateData parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TestTableDDLStateData(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:TestTableDDLStateData) } - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_TestTableDDLStateData_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_TestTableDDLStateData_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { @@ -507,23 +585,23 @@ public final class TestProcedureProtos { "tedB\023TestProcedureProtos\210\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new 
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_TestTableDDLStateData_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_TestTableDDLStateData_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_TestTableDDLStateData_descriptor, - new java.lang.String[] { "TableName", }); - return null; - } - }; + new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); + internal_static_TestTableDDLStateData_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_TestTableDDLStateData_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_TestTableDDLStateData_descriptor, + new java.lang.String[] { "TableName", }); } // @@protoc_insertion_point(outer_class_scope) diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestProtos.java index 417ee00..0fdd1fb 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestProtos.java @@ -6,44 +6,42 @@ package org.apache.hadoop.hbase.shaded.ipc.protobuf.generated; public final class TestProtos { private TestProtos() {} public static void 
registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); } - public interface EmptyRequestProtoOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface EmptyRequestProtoOrBuilder extends + // @@protoc_insertion_point(interface_extends:EmptyRequestProto) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code EmptyRequestProto} */ - public static final class EmptyRequestProto extends - com.google.protobuf.GeneratedMessage - implements EmptyRequestProtoOrBuilder { + public static final class EmptyRequestProto extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:EmptyRequestProto) + EmptyRequestProtoOrBuilder { // Use EmptyRequestProto.newBuilder() to construct. - private EmptyRequestProto(com.google.protobuf.GeneratedMessage.Builder builder) { + private EmptyRequestProto(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private EmptyRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final EmptyRequestProto defaultInstance; - public static EmptyRequestProto getDefaultInstance() { - return defaultInstance; - } - - public EmptyRequestProto getDefaultInstanceForType() { - return defaultInstance; + private EmptyRequestProto() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private EmptyRequestProto( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - 
initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -67,7 +65,7 @@ public final class TestProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -78,34 +76,18 @@ public final class TestProtos { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_EmptyRequestProto_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_EmptyRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyRequestProto.class, org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyRequestProto.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public EmptyRequestProto parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new EmptyRequestProto(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ 
-113,29 +95,21 @@ public final class TestProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -146,12 +120,10 @@ public final class TestProtos { org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyRequestProto other = (org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyRequestProto) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -159,7 +131,7 @@ public final class TestProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -187,46 +159,57 @@ public final class TestProtos { } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyRequestProto parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyRequestProto parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyRequestProto parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyRequestProto parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public 
static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyRequestProto prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -234,14 +217,15 @@ public final class TestProtos { * Protobuf type {@code EmptyRequestProto} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyRequestProtoOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:EmptyRequestProto) + org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyRequestProtoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_EmptyRequestProto_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_EmptyRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -254,27 +238,20 @@ public final class TestProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); 
maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_EmptyRequestProto_descriptor; @@ -298,6 +275,32 @@ public final class TestProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyRequestProto) { return mergeFrom((org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyRequestProto)other); @@ -309,7 +312,8 @@ public final class TestProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyRequestProto other) { if 
(other == org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyRequestProto.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -326,7 +330,7 @@ public final class TestProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyRequestProto) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -334,54 +338,83 @@ public final class TestProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:EmptyRequestProto) } + // @@protoc_insertion_point(class_scope:EmptyRequestProto) + private static final org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyRequestProto DEFAULT_INSTANCE; static { - defaultInstance = new EmptyRequestProto(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyRequestProto(); + } + + public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyRequestProto getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public EmptyRequestProto parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return new EmptyRequestProto(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyRequestProto getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:EmptyRequestProto) } - public interface EmptyResponseProtoOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface EmptyResponseProtoOrBuilder extends + // @@protoc_insertion_point(interface_extends:EmptyResponseProto) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code EmptyResponseProto} */ - public static final class EmptyResponseProto extends - com.google.protobuf.GeneratedMessage - implements EmptyResponseProtoOrBuilder { + public static final class EmptyResponseProto extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:EmptyResponseProto) + EmptyResponseProtoOrBuilder { // Use EmptyResponseProto.newBuilder() to construct. 
- private EmptyResponseProto(com.google.protobuf.GeneratedMessage.Builder builder) { + private EmptyResponseProto(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private EmptyResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final EmptyResponseProto defaultInstance; - public static EmptyResponseProto getDefaultInstance() { - return defaultInstance; - } - - public EmptyResponseProto getDefaultInstanceForType() { - return defaultInstance; + private EmptyResponseProto() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private EmptyResponseProto( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -405,7 +438,7 @@ public final class TestProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -416,34 +449,18 @@ public final class TestProtos { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_EmptyResponseProto_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_EmptyResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyResponseProto.class, org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyResponseProto.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public EmptyResponseProto parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new EmptyResponseProto(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -451,29 +468,21 @@ public final class TestProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final 
java.lang.Object obj) { if (obj == this) { return true; @@ -484,12 +493,10 @@ public final class TestProtos { org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyResponseProto other = (org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyResponseProto) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -497,7 +504,7 @@ public final class TestProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -525,46 +532,57 @@ public final class TestProtos { } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyResponseProto parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyResponseProto 
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyResponseProto parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyResponseProto parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyResponseProto prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -572,14 +590,15 @@ public final class TestProtos { * Protobuf type {@code EmptyResponseProto} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyResponseProtoOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:EmptyResponseProto) + org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyResponseProtoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_EmptyResponseProto_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_EmptyResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -592,27 +611,20 @@ public final class TestProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); return this; } - public Builder 
clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_EmptyResponseProto_descriptor; @@ -636,6 +648,32 @@ public final class TestProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyResponseProto) { return mergeFrom((org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyResponseProto)other); @@ -647,7 +685,8 @@ public final class TestProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyResponseProto other) { if (other == org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -664,7 +703,7 @@ public final class TestProtos { parsedMessage = PARSER.parsePartialFrom(input, 
extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyResponseProto) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -672,22 +711,59 @@ public final class TestProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:EmptyResponseProto) } + // @@protoc_insertion_point(class_scope:EmptyResponseProto) + private static final org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyResponseProto DEFAULT_INSTANCE; static { - defaultInstance = new EmptyResponseProto(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyResponseProto(); + } + + public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyResponseProto getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public EmptyResponseProto parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new EmptyResponseProto(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public 
org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyResponseProto getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:EmptyResponseProto) } - public interface EchoRequestProtoOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface EchoRequestProtoOrBuilder extends + // @@protoc_insertion_point(interface_extends:EchoRequestProto) + com.google.protobuf.MessageOrBuilder { - // required string message = 1; /** * required string message = 1; */ @@ -705,36 +781,28 @@ public final class TestProtos { /** * Protobuf type {@code EchoRequestProto} */ - public static final class EchoRequestProto extends - com.google.protobuf.GeneratedMessage - implements EchoRequestProtoOrBuilder { + public static final class EchoRequestProto extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:EchoRequestProto) + EchoRequestProtoOrBuilder { // Use EchoRequestProto.newBuilder() to construct. 
- private EchoRequestProto(com.google.protobuf.GeneratedMessage.Builder builder) { + private EchoRequestProto(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private EchoRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final EchoRequestProto defaultInstance; - public static EchoRequestProto getDefaultInstance() { - return defaultInstance; - } - - public EchoRequestProto getDefaultInstanceForType() { - return defaultInstance; + private EchoRequestProto() { + message_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private EchoRequestProto( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -754,8 +822,9 @@ public final class TestProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - message_ = input.readBytes(); + message_ = bs; break; } } @@ -764,7 +833,7 @@ public final class TestProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -775,32 +844,16 @@ public final class TestProtos { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_EchoRequestProto_descriptor; } - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_EchoRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoRequestProto.class, org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoRequestProto.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public EchoRequestProto parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new EchoRequestProto(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string message = 1; public static final int MESSAGE_FIELD_NUMBER = 1; - private java.lang.Object message_; + private volatile java.lang.Object message_; /** * required string message = 1; */ @@ -841,13 +894,11 @@ public final class TestProtos { } } - private void initFields() { - message_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasMessage()) { memoizedIsInitialized = 0; @@ -859,36 +910,27 @@ public final class TestProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getMessageBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, message_); } - 
getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getMessageBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, message_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -904,12 +946,10 @@ public final class TestProtos { result = result && getMessage() .equals(other.getMessage()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -921,7 +961,7 @@ public final class TestProtos { hash = (37 * hash) + MESSAGE_FIELD_NUMBER; hash = (53 * hash) + getMessage().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -949,46 +989,57 @@ public final class TestProtos { } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoRequestProto parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoRequestProto parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoRequestProto parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoRequestProto parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return 
DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoRequestProto prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -996,14 +1047,15 @@ public final class TestProtos { * Protobuf type {@code EchoRequestProto} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoRequestProtoOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:EchoRequestProto) + org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoRequestProtoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_EchoRequestProto_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_EchoRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -1016,18 +1068,15 @@ public final class TestProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private 
void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); message_ = ""; @@ -1035,10 +1084,6 @@ public final class TestProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_EchoRequestProto_descriptor; @@ -1069,6 +1114,32 @@ public final class TestProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoRequestProto) { return mergeFrom((org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoRequestProto)other); @@ -1085,13 +1156,13 @@ public final class TestProtos { message_ = other.message_; onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); 
+ this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasMessage()) { - return false; } return true; @@ -1106,7 +1177,7 @@ public final class TestProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoRequestProto) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -1116,7 +1187,6 @@ public final class TestProtos { } private int bitField0_; - // required string message = 1; private java.lang.Object message_ = ""; /** * required string message = 1; @@ -1130,9 +1200,12 @@ public final class TestProtos { public java.lang.String getMessage() { java.lang.Object ref = message_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - message_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + message_ = s; + } return s; } else { return (java.lang.String) ref; @@ -1189,22 +1262,59 @@ public final class TestProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:EchoRequestProto) } + // @@protoc_insertion_point(class_scope:EchoRequestProto) + private static final org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoRequestProto DEFAULT_INSTANCE; static { - defaultInstance = new EchoRequestProto(true); - defaultInstance.initFields(); + 
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoRequestProto(); + } + + public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoRequestProto getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public EchoRequestProto parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new EchoRequestProto(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoRequestProto getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:EchoRequestProto) } - public interface EchoResponseProtoOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface EchoResponseProtoOrBuilder extends + // @@protoc_insertion_point(interface_extends:EchoResponseProto) + com.google.protobuf.MessageOrBuilder { - // required string message = 1; /** * required string message = 1; */ @@ -1222,36 +1332,28 @@ public final class TestProtos { /** * Protobuf type {@code EchoResponseProto} */ - public static final class EchoResponseProto extends - com.google.protobuf.GeneratedMessage - implements EchoResponseProtoOrBuilder { + public static final class EchoResponseProto extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:EchoResponseProto) + EchoResponseProtoOrBuilder { // Use EchoResponseProto.newBuilder() to construct. 
- private EchoResponseProto(com.google.protobuf.GeneratedMessage.Builder builder) { + private EchoResponseProto(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private EchoResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final EchoResponseProto defaultInstance; - public static EchoResponseProto getDefaultInstance() { - return defaultInstance; - } - - public EchoResponseProto getDefaultInstanceForType() { - return defaultInstance; + private EchoResponseProto() { + message_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private EchoResponseProto( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -1271,8 +1373,9 @@ public final class TestProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - message_ = input.readBytes(); + message_ = bs; break; } } @@ -1281,7 +1384,7 @@ public final class TestProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -1292,32 +1395,16 @@ public final class TestProtos { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_EchoResponseProto_descriptor; } - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_EchoResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoResponseProto.class, org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoResponseProto.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public EchoResponseProto parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new EchoResponseProto(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string message = 1; public static final int MESSAGE_FIELD_NUMBER = 1; - private java.lang.Object message_; + private volatile java.lang.Object message_; /** * required string message = 1; */ @@ -1358,13 +1445,11 @@ public final class TestProtos { } } - private void initFields() { - message_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasMessage()) { memoizedIsInitialized = 0; @@ -1376,36 +1461,27 @@ public final class TestProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getMessageBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, message_); } - 
getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getMessageBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, message_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -1421,12 +1497,10 @@ public final class TestProtos { result = result && getMessage() .equals(other.getMessage()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -1438,7 +1512,7 @@ public final class TestProtos { hash = (37 * hash) + MESSAGE_FIELD_NUMBER; hash = (53 * hash) + getMessage().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -1466,46 +1540,57 @@ public final class TestProtos { } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoResponseProto parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoResponseProto parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoResponseProto parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoResponseProto parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return 
DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoResponseProto prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -1513,14 +1598,15 @@ public final class TestProtos { * Protobuf type {@code EchoResponseProto} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoResponseProtoOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:EchoResponseProto) + org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoResponseProtoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_EchoResponseProto_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_EchoResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -1533,18 +1619,15 @@ public final class TestProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } 
private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); message_ = ""; @@ -1552,10 +1635,6 @@ public final class TestProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_EchoResponseProto_descriptor; @@ -1586,6 +1665,32 @@ public final class TestProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoResponseProto) { return mergeFrom((org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoResponseProto)other); @@ -1602,13 +1707,13 @@ public final class TestProtos { message_ = other.message_; onChanged(); } - 
this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasMessage()) { - return false; } return true; @@ -1623,7 +1728,7 @@ public final class TestProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoResponseProto) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -1633,7 +1738,6 @@ public final class TestProtos { } private int bitField0_; - // required string message = 1; private java.lang.Object message_ = ""; /** * required string message = 1; @@ -1647,9 +1751,12 @@ public final class TestProtos { public java.lang.String getMessage() { java.lang.Object ref = message_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - message_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + message_ = s; + } return s; } else { return (java.lang.String) ref; @@ -1706,22 +1813,59 @@ public final class TestProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:EchoResponseProto) } + // @@protoc_insertion_point(class_scope:EchoResponseProto) + private static final org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoResponseProto DEFAULT_INSTANCE; static { - defaultInstance = new 
EchoResponseProto(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoResponseProto(); + } + + public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoResponseProto getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public EchoResponseProto parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new EchoResponseProto(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoResponseProto getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:EchoResponseProto) } - public interface PauseRequestProtoOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface PauseRequestProtoOrBuilder extends + // @@protoc_insertion_point(interface_extends:PauseRequestProto) + com.google.protobuf.MessageOrBuilder { - // required uint32 ms = 1; /** * required uint32 ms = 1; */ @@ -1734,36 +1878,28 @@ public final class TestProtos { /** * Protobuf type {@code PauseRequestProto} */ - public static final class PauseRequestProto extends - com.google.protobuf.GeneratedMessage - implements PauseRequestProtoOrBuilder { + public static final class PauseRequestProto extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:PauseRequestProto) + PauseRequestProtoOrBuilder { // Use PauseRequestProto.newBuilder() to construct. 
- private PauseRequestProto(com.google.protobuf.GeneratedMessage.Builder builder) { + private PauseRequestProto(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private PauseRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final PauseRequestProto defaultInstance; - public static PauseRequestProto getDefaultInstance() { - return defaultInstance; } - - public PauseRequestProto getDefaultInstanceForType() { - return defaultInstance; + private PauseRequestProto() { + ms_ = 0; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private PauseRequestProto( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -1793,7 +1929,7 @@ public final class TestProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -1804,30 +1940,14 @@ public final class TestProtos { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_PauseRequestProto_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_PauseRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.PauseRequestProto.class, org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.PauseRequestProto.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public PauseRequestProto parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new PauseRequestProto(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required uint32 ms = 1; public static final int MS_FIELD_NUMBER = 1; private int ms_; /** @@ -1843,13 +1963,11 @@ public final class TestProtos { return ms_; } - private void initFields() { - ms_ = 0; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasMs()) { memoizedIsInitialized = 0; @@ -1861,16 +1979,14 @@ public final class TestProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt32(1, ms_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -1878,19 +1994,13 @@ public final class TestProtos { size += com.google.protobuf.CodedOutputStream .computeUInt32Size(1, ms_); } - 
size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -1906,12 +2016,10 @@ public final class TestProtos { result = result && (getMs() == other.getMs()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -1923,7 +2031,7 @@ public final class TestProtos { hash = (37 * hash) + MS_FIELD_NUMBER; hash = (53 * hash) + getMs(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -1951,46 +2059,57 @@ public final class TestProtos { } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.PauseRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.PauseRequestProto parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.PauseRequestProto parseDelimitedFrom(java.io.InputStream input) throws 
java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.PauseRequestProto parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.PauseRequestProto parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.PauseRequestProto parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.PauseRequestProto prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -1998,14 +2117,15 @@ public final class TestProtos { * Protobuf type {@code PauseRequestProto} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.PauseRequestProtoOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:PauseRequestProto) + org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.PauseRequestProtoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_PauseRequestProto_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_PauseRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -2018,18 +2138,15 @@ public final class TestProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); ms_ = 0; @@ -2037,10 +2154,6 @@ public 
final class TestProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_PauseRequestProto_descriptor; @@ -2071,6 +2184,32 @@ public final class TestProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.PauseRequestProto) { return mergeFrom((org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.PauseRequestProto)other); @@ -2085,13 +2224,13 @@ public final class TestProtos { if (other.hasMs()) { setMs(other.getMs()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasMs()) { - return false; } return true; @@ -2106,7 +2245,7 @@ public final class TestProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.PauseRequestProto) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -2116,7 +2255,6 @@ public final class TestProtos { } private int bitField0_; - // required uint32 ms = 1; private int ms_ ; /** * required uint32 ms = 1; @@ -2148,22 +2286,59 @@ public final class TestProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:PauseRequestProto) } + // @@protoc_insertion_point(class_scope:PauseRequestProto) + private static final org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.PauseRequestProto DEFAULT_INSTANCE; static { - defaultInstance = new PauseRequestProto(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.PauseRequestProto(); + } + + public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.PauseRequestProto getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public PauseRequestProto parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new PauseRequestProto(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public 
com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.PauseRequestProto getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:PauseRequestProto) } - public interface AddrResponseProtoOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface AddrResponseProtoOrBuilder extends + // @@protoc_insertion_point(interface_extends:AddrResponseProto) + com.google.protobuf.MessageOrBuilder { - // required string addr = 1; /** * required string addr = 1; */ @@ -2181,36 +2356,28 @@ public final class TestProtos { /** * Protobuf type {@code AddrResponseProto} */ - public static final class AddrResponseProto extends - com.google.protobuf.GeneratedMessage - implements AddrResponseProtoOrBuilder { + public static final class AddrResponseProto extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:AddrResponseProto) + AddrResponseProtoOrBuilder { // Use AddrResponseProto.newBuilder() to construct. 
- private AddrResponseProto(com.google.protobuf.GeneratedMessage.Builder builder) { + private AddrResponseProto(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private AddrResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final AddrResponseProto defaultInstance; - public static AddrResponseProto getDefaultInstance() { - return defaultInstance; } - - public AddrResponseProto getDefaultInstanceForType() { - return defaultInstance; + private AddrResponseProto() { + addr_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private AddrResponseProto( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -2230,8 +2397,9 @@ public final class TestProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - addr_ = input.readBytes(); + addr_ = bs; break; } } @@ -2240,7 +2408,7 @@ public final class TestProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -2251,32 +2419,16 @@ public final class TestProtos { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_AddrResponseProto_descriptor; } - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_AddrResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.AddrResponseProto.class, org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.AddrResponseProto.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public AddrResponseProto parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new AddrResponseProto(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string addr = 1; public static final int ADDR_FIELD_NUMBER = 1; - private java.lang.Object addr_; + private volatile java.lang.Object addr_; /** * required string addr = 1; */ @@ -2317,13 +2469,11 @@ public final class TestProtos { } } - private void initFields() { - addr_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasAddr()) { memoizedIsInitialized = 0; @@ -2335,36 +2485,27 @@ public final class TestProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getAddrBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, addr_); } - getUnknownFields().writeTo(output); + 
unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getAddrBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, addr_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -2380,12 +2521,10 @@ public final class TestProtos { result = result && getAddr() .equals(other.getAddr()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -2397,7 +2536,7 @@ public final class TestProtos { hash = (37 * hash) + ADDR_FIELD_NUMBER; hash = (53 * hash) + getAddr().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -2425,46 +2564,57 @@ public final class TestProtos { } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.AddrResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.AddrResponseProto 
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.AddrResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.AddrResponseProto parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.AddrResponseProto parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.AddrResponseProto parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder 
newBuilder(org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.AddrResponseProto prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -2472,14 +2622,15 @@ public final class TestProtos { * Protobuf type {@code AddrResponseProto} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.AddrResponseProtoOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:AddrResponseProto) + org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.AddrResponseProtoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_AddrResponseProto_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_AddrResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -2492,18 +2643,15 @@ public final class TestProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if 
(com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); addr_ = ""; @@ -2511,10 +2659,6 @@ public final class TestProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.internal_static_AddrResponseProto_descriptor; @@ -2545,6 +2689,32 @@ public final class TestProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.AddrResponseProto) { return mergeFrom((org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.AddrResponseProto)other); @@ -2561,13 +2731,13 @@ public final class TestProtos { addr_ = other.addr_; onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + 
onChanged(); return this; } public final boolean isInitialized() { if (!hasAddr()) { - return false; } return true; @@ -2582,7 +2752,7 @@ public final class TestProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.AddrResponseProto) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -2592,7 +2762,6 @@ public final class TestProtos { } private int bitField0_; - // required string addr = 1; private java.lang.Object addr_ = ""; /** * required string addr = 1; @@ -2606,9 +2775,12 @@ public final class TestProtos { public java.lang.String getAddr() { java.lang.Object ref = addr_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - addr_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + addr_ = s; + } return s; } else { return (java.lang.String) ref; @@ -2665,54 +2837,91 @@ public final class TestProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:AddrResponseProto) } + // @@protoc_insertion_point(class_scope:AddrResponseProto) + private static final org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.AddrResponseProto DEFAULT_INSTANCE; static { - defaultInstance = new AddrResponseProto(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new 
org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.AddrResponseProto(); + } + + public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.AddrResponseProto getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public AddrResponseProto parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new AddrResponseProto(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.AddrResponseProto getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:AddrResponseProto) } - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_EmptyRequestProto_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_EmptyRequestProto_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_EmptyResponseProto_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_EmptyResponseProto_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_EchoRequestProto_descriptor; - 
private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_EchoRequestProto_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_EchoResponseProto_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_EchoResponseProto_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_PauseRequestProto_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_PauseRequestProto_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_AddrResponseProto_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_AddrResponseProto_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { @@ -2725,53 +2934,53 @@ public final class TestProtos { "tobuf.generatedB\nTestProtos\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - 
com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_EmptyRequestProto_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_EmptyRequestProto_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_EmptyRequestProto_descriptor, - new java.lang.String[] { }); - internal_static_EmptyResponseProto_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_EmptyResponseProto_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_EmptyResponseProto_descriptor, - new java.lang.String[] { }); - internal_static_EchoRequestProto_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_EchoRequestProto_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_EchoRequestProto_descriptor, - new java.lang.String[] { "Message", }); - internal_static_EchoResponseProto_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_EchoResponseProto_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_EchoResponseProto_descriptor, - new java.lang.String[] { "Message", }); - internal_static_PauseRequestProto_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_PauseRequestProto_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_PauseRequestProto_descriptor, - new java.lang.String[] { "Ms", }); - internal_static_AddrResponseProto_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_AddrResponseProto_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_AddrResponseProto_descriptor, - new java.lang.String[] { "Addr", }); - return null; - } - }; + new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); + internal_static_EmptyRequestProto_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_EmptyRequestProto_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_EmptyRequestProto_descriptor, + new java.lang.String[] { }); + internal_static_EmptyResponseProto_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_EmptyResponseProto_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_EmptyResponseProto_descriptor, + new java.lang.String[] { }); + internal_static_EchoRequestProto_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_EchoRequestProto_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_EchoRequestProto_descriptor, + new java.lang.String[] { "Message", }); + internal_static_EchoResponseProto_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_EchoResponseProto_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_EchoResponseProto_descriptor, + new java.lang.String[] { "Message", }); + internal_static_PauseRequestProto_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_PauseRequestProto_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_PauseRequestProto_descriptor, + new java.lang.String[] { "Ms", }); + internal_static_AddrResponseProto_descriptor = + getDescriptor().getMessageTypes().get(5); + 
internal_static_AddrResponseProto_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_AddrResponseProto_descriptor, + new java.lang.String[] { "Addr", }); } // @@protoc_insertion_point(outer_class_scope) diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestRpcServiceProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestRpcServiceProtos.java index 00a4c8d..86d12f3 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestRpcServiceProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestRpcServiceProtos.java @@ -6,15 +6,21 @@ package org.apache.hadoop.hbase.shaded.ipc.protobuf.generated; public final class TestRpcServiceProtos { private TestRpcServiceProtos() {} public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); } /** - * Protobuf service {@code TestProtobufRpcProto} - * *

    **
    * A protobuf service for use in tests
    * 
+ * + * Protobuf service {@code TestProtobufRpcProto} */ public static abstract class TestProtobufRpcProto implements com.google.protobuf.Service { @@ -534,7 +540,7 @@ public final class TestRpcServiceProtos { getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { @@ -550,18 +556,19 @@ public final class TestRpcServiceProtos { "edB\024TestRpcServiceProtos\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - return null; - } - }; + new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.getDescriptor(), }, assigner); + org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AdminProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AdminProtos.java index f54d238..e15dbc4 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AdminProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AdminProtos.java @@ 
-6,12 +6,18 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated; public final class AdminProtos { private AdminProtos() {} public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); } - public interface GetRegionInfoRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface GetRegionInfoRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GetRegionInfoRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionSpecifier region = 1; /** * required .hbase.pb.RegionSpecifier region = 1; */ @@ -25,7 +31,6 @@ public final class AdminProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - // optional bool compaction_state = 2; /** * optional bool compaction_state = 2; */ @@ -38,36 +43,28 @@ public final class AdminProtos { /** * Protobuf type {@code hbase.pb.GetRegionInfoRequest} */ - public static final class GetRegionInfoRequest extends - com.google.protobuf.GeneratedMessage - implements GetRegionInfoRequestOrBuilder { + public static final class GetRegionInfoRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetRegionInfoRequest) + GetRegionInfoRequestOrBuilder { // Use GetRegionInfoRequest.newBuilder() to construct. 
- private GetRegionInfoRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetRegionInfoRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private GetRegionInfoRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final GetRegionInfoRequest defaultInstance; - public static GetRegionInfoRequest getDefaultInstance() { - return defaultInstance; } - - public GetRegionInfoRequest getDefaultInstanceForType() { - return defaultInstance; + private GetRegionInfoRequest() { + compactionState_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private GetRegionInfoRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -110,7 +107,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -121,30 +118,14 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetRegionInfoRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetRegionInfoRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetRegionInfoRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetRegionInfoRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** @@ -157,16 +138,15 @@ public final class AdminProtos { * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } /** * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } - // optional bool compaction_state = 2; public static final int COMPACTION_STATE_FIELD_NUMBER = 2; private boolean compactionState_; /** @@ -182,14 +162,11 @@ public final class AdminProtos { return compactionState_; } - private void initFields() { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - compactionState_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRegion()) { memoizedIsInitialized = 0; @@ -205,43 +182,35 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); + output.writeMessage(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBool(2, compactionState_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); + .computeMessageSize(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(2, compactionState_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - 
protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -262,12 +231,10 @@ public final class AdminProtos { result = result && (getCompactionState() == other.getCompactionState()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -281,9 +248,10 @@ public final class AdminProtos { } if (hasCompactionState()) { hash = (37 * hash) + COMPACTION_STATE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getCompactionState()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getCompactionState()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -311,46 +279,57 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + 
return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -358,14 +337,15 @@ public final class AdminProtos { * Protobuf type {@code hbase.pb.GetRegionInfoRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetRegionInfoRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetRegionInfoRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetRegionInfoRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -378,23 +358,20 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder 
clear() { super.clear(); if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; } else { regionBuilder_.clear(); } @@ -404,10 +381,6 @@ public final class AdminProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetRegionInfoRequest_descriptor; @@ -446,6 +419,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest)other); @@ -463,17 +462,16 @@ public final class AdminProtos { if (other.hasCompactionState()) { setCompactionState(other.getCompactionState()); } - this.mergeUnknownFields(other.getUnknownFields()); + 
this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasRegion()) { - return false; } if (!getRegion().isInitialized()) { - return false; } return true; @@ -488,7 +486,7 @@ public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -498,9 +496,8 @@ public final class AdminProtos { } private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * required .hbase.pb.RegionSpecifier region = 1; @@ -513,7 +510,7 @@ public final class AdminProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } @@ -554,6 +551,7 @@ public final class AdminProtos { public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != null && region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); @@ -572,7 +570,7 @@ public final class AdminProtos { */ public Builder clearRegion() { if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; onChanged(); } else { regionBuilder_.clear(); @@ -595,19 +593,20 @@ public final class AdminProtos { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { - return region_; + return region_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } } /** * required .hbase.pb.RegionSpecifier region = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, + getRegion(), getParentForChildren(), isClean()); region_ = null; @@ -615,7 +614,6 @@ public final class AdminProtos { return regionBuilder_; } - // optional bool compaction_state = 2; private boolean compactionState_ ; /** * optional bool compaction_state = 2; @@ -647,22 +645,59 @@ public final class AdminProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.GetRegionInfoRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.GetRegionInfoRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest DEFAULT_INSTANCE; static { - defaultInstance = new GetRegionInfoRequest(true); - 
defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GetRegionInfoRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetRegionInfoRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.GetRegionInfoRequest) } - public interface GetRegionInfoResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface GetRegionInfoResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GetRegionInfoResponse) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionInfo region_info = 1; /** * required .hbase.pb.RegionInfo region_info = 1; */ @@ -676,7 +711,6 @@ public final class AdminProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(); - // optional .hbase.pb.GetRegionInfoResponse.CompactionState compaction_state = 2; /** * optional .hbase.pb.GetRegionInfoResponse.CompactionState compaction_state = 2; */ @@ -686,7 +720,6 @@ public final class AdminProtos { */ 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState getCompactionState(); - // optional bool isRecovering = 3; /** * optional bool isRecovering = 3; */ @@ -699,36 +732,29 @@ public final class AdminProtos { /** * Protobuf type {@code hbase.pb.GetRegionInfoResponse} */ - public static final class GetRegionInfoResponse extends - com.google.protobuf.GeneratedMessage - implements GetRegionInfoResponseOrBuilder { + public static final class GetRegionInfoResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetRegionInfoResponse) + GetRegionInfoResponseOrBuilder { // Use GetRegionInfoResponse.newBuilder() to construct. - private GetRegionInfoResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetRegionInfoResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private GetRegionInfoResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final GetRegionInfoResponse defaultInstance; - public static GetRegionInfoResponse getDefaultInstance() { - return defaultInstance; } - - public GetRegionInfoResponse getDefaultInstanceForType() { - return defaultInstance; + private GetRegionInfoResponse() { + compactionState_ = 0; + isRecovering_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private GetRegionInfoResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = 
com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -767,7 +793,7 @@ public final class AdminProtos { unknownFields.mergeVarintField(2, rawValue); } else { bitField0_ |= 0x00000002; - compactionState_ = value; + compactionState_ = rawValue; } break; } @@ -782,7 +808,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -793,28 +819,13 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetRegionInfoResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetRegionInfoResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetRegionInfoResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetRegionInfoResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - /** * Protobuf enum {@code hbase.pb.GetRegionInfoResponse.CompactionState} */ @@ -823,19 +834,19 @@ public final class AdminProtos { /** * NONE = 0; */ - NONE(0, 0), + NONE(0), /** * MINOR 
= 1; */ - MINOR(1, 1), + MINOR(1), /** * MAJOR = 2; */ - MAJOR(2, 2), + MAJOR(2), /** * MAJOR_AND_MINOR = 3; */ - MAJOR_AND_MINOR(3, 3), + MAJOR_AND_MINOR(3), ; /** @@ -856,9 +867,19 @@ public final class AdminProtos { public static final int MAJOR_AND_MINOR_VALUE = 3; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static CompactionState valueOf(int value) { + return forNumber(value); + } + + public static CompactionState forNumber(int value) { switch (value) { case 0: return NONE; case 1: return MINOR; @@ -872,17 +893,17 @@ public final class AdminProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + CompactionState> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public CompactionState findValueByNumber(int number) { - return CompactionState.valueOf(number); + return CompactionState.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -904,11 +925,9 @@ public final class AdminProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private CompactionState(int index, int value) { - this.index = index; + private CompactionState(int value) { this.value = value; } @@ -916,7 +935,6 @@ public final class AdminProtos { } private int bitField0_; - // required .hbase.pb.RegionInfo region_info = 1; public static final int REGION_INFO_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo regionInfo_; /** @@ -929,18 +947,17 @@ 
public final class AdminProtos { * required .hbase.pb.RegionInfo region_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { - return regionInfo_; + return regionInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : regionInfo_; } /** * required .hbase.pb.RegionInfo region_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() { - return regionInfo_; + return regionInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : regionInfo_; } - // optional .hbase.pb.GetRegionInfoResponse.CompactionState compaction_state = 2; public static final int COMPACTION_STATE_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState compactionState_; + private int compactionState_; /** * optional .hbase.pb.GetRegionInfoResponse.CompactionState compaction_state = 2; */ @@ -951,10 +968,10 @@ public final class AdminProtos { * optional .hbase.pb.GetRegionInfoResponse.CompactionState compaction_state = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState getCompactionState() { - return compactionState_; + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState result = org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.valueOf(compactionState_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE : result; } - // optional bool isRecovering = 3; public static final int ISRECOVERING_FIELD_NUMBER = 3; private boolean isRecovering_; /** @@ -970,15 +987,11 @@ public final class AdminProtos { return isRecovering_; } - private void initFields() { - regionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - compactionState_ = org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE; - isRecovering_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRegionInfo()) { memoizedIsInitialized = 0; @@ -994,50 +1007,42 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, regionInfo_); + output.writeMessage(1, getRegionInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeEnum(2, compactionState_.getNumber()); + output.writeEnum(2, compactionState_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBool(3, isRecovering_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, regionInfo_); + .computeMessageSize(1, getRegionInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - 
.computeEnumSize(2, compactionState_.getNumber()); + .computeEnumSize(2, compactionState_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(3, isRecovering_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -1055,20 +1060,17 @@ public final class AdminProtos { } result = result && (hasCompactionState() == other.hasCompactionState()); if (hasCompactionState()) { - result = result && - (getCompactionState() == other.getCompactionState()); + result = result && compactionState_ == other.compactionState_; } result = result && (hasIsRecovering() == other.hasIsRecovering()); if (hasIsRecovering()) { result = result && (getIsRecovering() == other.getIsRecovering()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -1082,13 +1084,14 @@ public final class AdminProtos { } if (hasCompactionState()) { hash = (37 * hash) + COMPACTION_STATE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getCompactionState()); + hash = (53 * hash) + compactionState_; } if (hasIsRecovering()) { hash = (37 * hash) + ISRECOVERING_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getIsRecovering()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getIsRecovering()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = 
hash; return hash; } @@ -1116,46 +1119,57 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -1163,14 +1177,15 @@ public final class AdminProtos { * Protobuf type {@code hbase.pb.GetRegionInfoResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetRegionInfoResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetRegionInfoResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetRegionInfoResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -1183,38 +1198,31 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionInfoFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (regionInfoBuilder_ == null) { - regionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + regionInfo_ = null; } else { regionInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); - compactionState_ = org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE; + compactionState_ = 0; bitField0_ = (bitField0_ & ~0x00000002); isRecovering_ = false; bitField0_ = (bitField0_ & ~0x00000004); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetRegionInfoResponse_descriptor; @@ -1257,6 +1265,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) 
super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse)other); @@ -1277,17 +1311,16 @@ public final class AdminProtos { if (other.hasIsRecovering()) { setIsRecovering(other.getIsRecovering()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasRegionInfo()) { - return false; } if (!getRegionInfo().isInitialized()) { - return false; } return true; @@ -1302,7 +1335,7 @@ public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -1312,9 +1345,8 @@ public final class AdminProtos { } private int bitField0_; - // required .hbase.pb.RegionInfo region_info = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - private 
com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_; /** * required .hbase.pb.RegionInfo region_info = 1; @@ -1327,7 +1359,7 @@ public final class AdminProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { if (regionInfoBuilder_ == null) { - return regionInfo_; + return regionInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : regionInfo_; } else { return regionInfoBuilder_.getMessage(); } @@ -1368,6 +1400,7 @@ public final class AdminProtos { public Builder mergeRegionInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo value) { if (regionInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + regionInfo_ != null && regionInfo_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) { regionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(regionInfo_).mergeFrom(value).buildPartial(); @@ -1386,7 +1419,7 @@ public final class AdminProtos { */ public Builder clearRegionInfo() { if (regionInfoBuilder_ == null) { - regionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + regionInfo_ = null; onChanged(); } else { regionInfoBuilder_.clear(); @@ -1409,19 +1442,20 @@ public final class AdminProtos { if (regionInfoBuilder_ != null) { return regionInfoBuilder_.getMessageOrBuilder(); } else { - return regionInfo_; + return regionInfo_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : regionInfo_; } } /** * required .hbase.pb.RegionInfo region_info = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> getRegionInfoFieldBuilder() { if (regionInfoBuilder_ == null) { - regionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( - regionInfo_, + getRegionInfo(), getParentForChildren(), isClean()); regionInfo_ = null; @@ -1429,8 +1463,7 @@ public final class AdminProtos { return regionInfoBuilder_; } - // optional .hbase.pb.GetRegionInfoResponse.CompactionState compaction_state = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState compactionState_ = org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE; + private int compactionState_ = 0; /** * optional .hbase.pb.GetRegionInfoResponse.CompactionState compaction_state = 2; */ @@ -1441,7 +1474,8 @@ public final class AdminProtos { * optional .hbase.pb.GetRegionInfoResponse.CompactionState compaction_state = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState getCompactionState() { - return compactionState_; + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState result = 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.valueOf(compactionState_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE : result; } /** * optional .hbase.pb.GetRegionInfoResponse.CompactionState compaction_state = 2; @@ -1451,7 +1485,7 @@ public final class AdminProtos { throw new NullPointerException(); } bitField0_ |= 0x00000002; - compactionState_ = value; + compactionState_ = value.getNumber(); onChanged(); return this; } @@ -1460,12 +1494,11 @@ public final class AdminProtos { */ public Builder clearCompactionState() { bitField0_ = (bitField0_ & ~0x00000002); - compactionState_ = org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE; + compactionState_ = 0; onChanged(); return this; } - // optional bool isRecovering = 3; private boolean isRecovering_ ; /** * optional bool isRecovering = 3; @@ -1497,22 +1530,59 @@ public final class AdminProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.GetRegionInfoResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.GetRegionInfoResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse DEFAULT_INSTANCE; static { - defaultInstance = new GetRegionInfoResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse 
getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GetRegionInfoResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetRegionInfoResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.GetRegionInfoResponse) } - public interface GetStoreFileRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface GetStoreFileRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GetStoreFileRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionSpecifier region = 1; /** * required .hbase.pb.RegionSpecifier region = 1; */ @@ -1526,7 +1596,6 @@ public final class AdminProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - // repeated bytes family = 2; /** * repeated bytes family = 2; */ @@ -1541,44 +1610,36 @@ public final class AdminProtos { com.google.protobuf.ByteString getFamily(int index); } /** - * Protobuf type {@code hbase.pb.GetStoreFileRequest} - * *
    **
    * Get a list of store files for a set of column families in a particular region.
    * If no column family is specified, get the store files for all column families.
    * 
+ * + * Protobuf type {@code hbase.pb.GetStoreFileRequest} */ - public static final class GetStoreFileRequest extends - com.google.protobuf.GeneratedMessage - implements GetStoreFileRequestOrBuilder { + public static final class GetStoreFileRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetStoreFileRequest) + GetStoreFileRequestOrBuilder { // Use GetStoreFileRequest.newBuilder() to construct. - private GetStoreFileRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetStoreFileRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private GetStoreFileRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final GetStoreFileRequest defaultInstance; - public static GetStoreFileRequest getDefaultInstance() { - return defaultInstance; - } - - public GetStoreFileRequest getDefaultInstanceForType() { - return defaultInstance; + private GetStoreFileRequest() { + family_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private GetStoreFileRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -1624,7 +1685,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + 
e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { family_ = java.util.Collections.unmodifiableList(family_); @@ -1638,30 +1699,14 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetStoreFileRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetStoreFileRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetStoreFileRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetStoreFileRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** @@ -1674,16 +1719,15 @@ public final class AdminProtos { * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } /** * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } - // repeated bytes family = 2; public static final int FAMILY_FIELD_NUMBER = 2; private java.util.List family_; /** @@ -1706,14 +1750,11 @@ public final class AdminProtos { return family_.get(index); } - private void initFields() { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - family_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRegion()) { memoizedIsInitialized = 0; @@ -1729,25 +1770,23 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); + output.writeMessage(1, getRegion()); } for (int i = 0; i < family_.size(); i++) { output.writeBytes(2, family_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); + .computeMessageSize(1, getRegion()); } { int dataSize = 0; @@ -1758,19 +1797,13 @@ public 
final class AdminProtos { size += dataSize; size += 1 * getFamilyList().size(); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -1788,12 +1821,10 @@ public final class AdminProtos { } result = result && getFamilyList() .equals(other.getFamilyList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -1809,7 +1840,7 @@ public final class AdminProtos { hash = (37 * hash) + FAMILY_FIELD_NUMBER; hash = (53 * hash) + getFamilyList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -1837,67 +1868,79 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.GetStoreFileRequest} - * *
      **
      * Get a list of store files for a set of column families in a particular region.
      * If no column family is specified, get the store files for all column families.
      * 
+ * + * Protobuf type {@code hbase.pb.GetStoreFileRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetStoreFileRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetStoreFileRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetStoreFileRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -1910,23 +1953,20 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; } else { regionBuilder_.clear(); } @@ -1936,10 +1976,6 @@ public final class AdminProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetStoreFileRequest_descriptor; @@ -1979,6 +2015,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileRequest)other); @@ -2003,17 +2065,16 @@ public final class AdminProtos { } onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasRegion()) { - return false; } if (!getRegion().isInitialized()) { - return false; } return true; @@ -2028,7 +2089,7 @@ public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = 
(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -2038,9 +2099,8 @@ public final class AdminProtos { } private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * required .hbase.pb.RegionSpecifier region = 1; @@ -2053,7 +2113,7 @@ public final class AdminProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } @@ -2094,6 +2154,7 @@ public final class AdminProtos { public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != null && region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); @@ -2112,7 +2173,7 @@ public final class AdminProtos { */ public Builder clearRegion() { if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; onChanged(); } else { regionBuilder_.clear(); @@ -2135,19 +2196,20 @@ public final class AdminProtos { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { - return region_; + return region_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } } /** * required .hbase.pb.RegionSpecifier region = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, + getRegion(), getParentForChildren(), isClean()); region_ = null; @@ -2155,7 +2217,6 @@ public final class AdminProtos { return regionBuilder_; } - // repeated bytes family = 2; private java.util.List family_ = java.util.Collections.emptyList(); private void ensureFamilyIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { @@ -2213,7 +2274,8 @@ public final class AdminProtos { public Builder addAllFamily( java.lang.Iterable values) { ensureFamilyIsMutable(); - super.addAll(values, family_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, family_); onChanged(); return this; } @@ -2226,27 +2288,64 @@ public final class AdminProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // 
@@protoc_insertion_point(builder_scope:hbase.pb.GetStoreFileRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.GetStoreFileRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileRequest DEFAULT_INSTANCE; static { - defaultInstance = new GetStoreFileRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GetStoreFileRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetStoreFileRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.GetStoreFileRequest) } - public interface GetStoreFileResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface GetStoreFileResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GetStoreFileResponse) + com.google.protobuf.MessageOrBuilder { - // repeated string store_file = 1; /** * repeated string store_file = 1; */ java.util.List - getStoreFileList(); + getStoreFileList(); /** * repeated string store_file = 1; */ @@ -2264,36 +2363,28 @@ public final class AdminProtos { /** * Protobuf type {@code 
hbase.pb.GetStoreFileResponse} */ - public static final class GetStoreFileResponse extends - com.google.protobuf.GeneratedMessage - implements GetStoreFileResponseOrBuilder { + public static final class GetStoreFileResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetStoreFileResponse) + GetStoreFileResponseOrBuilder { // Use GetStoreFileResponse.newBuilder() to construct. - private GetStoreFileResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetStoreFileResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private GetStoreFileResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final GetStoreFileResponse defaultInstance; - public static GetStoreFileResponse getDefaultInstance() { - return defaultInstance; } - - public GetStoreFileResponse getDefaultInstanceForType() { - return defaultInstance; + private GetStoreFileResponse() { + storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private GetStoreFileResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -2313,11 +2404,12 @@ public final class AdminProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { storeFile_ = new com.google.protobuf.LazyStringArrayList(); 
mutable_bitField0_ |= 0x00000001; } - storeFile_.add(input.readBytes()); + storeFile_.add(bs); break; } } @@ -2326,10 +2418,10 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - storeFile_ = new com.google.protobuf.UnmodifiableLazyStringList(storeFile_); + storeFile_ = storeFile_.getUnmodifiableView(); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -2340,35 +2432,19 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetStoreFileResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetStoreFileResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetStoreFileResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetStoreFileResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - // repeated string store_file = 1; public static final int STORE_FILE_FIELD_NUMBER = 1; private com.google.protobuf.LazyStringList storeFile_; /** 
* repeated string store_file = 1; */ - public java.util.List + public com.google.protobuf.ProtocolStringList getStoreFileList() { return storeFile_; } @@ -2392,13 +2468,11 @@ public final class AdminProtos { return storeFile_.getByteString(index); } - private void initFields() { - storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -2406,41 +2480,32 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < storeFile_.size(); i++) { - output.writeBytes(1, storeFile_.getByteString(i)); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, storeFile_.getRaw(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; { int dataSize = 0; for (int i = 0; i < storeFile_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(storeFile_.getByteString(i)); + dataSize += computeStringSizeNoTag(storeFile_.getRaw(i)); } size += dataSize; size += 1 * getStoreFileList().size(); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) 
{ return true; @@ -2453,12 +2518,10 @@ public final class AdminProtos { boolean result = true; result = result && getStoreFileList() .equals(other.getStoreFileList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -2470,7 +2533,7 @@ public final class AdminProtos { hash = (37 * hash) + STORE_FILE_FIELD_NUMBER; hash = (53 * hash) + getStoreFileList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -2498,46 +2561,57 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -2545,14 +2619,15 @@ public final class AdminProtos { * Protobuf type {@code hbase.pb.GetStoreFileResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetStoreFileResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetStoreFileResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetStoreFileResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -2565,18 +2640,15 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { 
super.clear(); storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; @@ -2584,10 +2656,6 @@ public final class AdminProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetStoreFileResponse_descriptor; @@ -2609,8 +2677,7 @@ public final class AdminProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileResponse result = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileResponse(this); int from_bitField0_ = bitField0_; if (((bitField0_ & 0x00000001) == 0x00000001)) { - storeFile_ = new com.google.protobuf.UnmodifiableLazyStringList( - storeFile_); + storeFile_ = storeFile_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000001); } result.storeFile_ = storeFile_; @@ -2618,6 +2685,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileResponse)other); @@ -2639,7 +2732,8 @@ public final class AdminProtos { } onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -2656,7 +2750,7 @@ public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -2666,7 +2760,6 @@ public final class AdminProtos { } private int bitField0_; - // repeated string store_file = 1; private com.google.protobuf.LazyStringList storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; private void ensureStoreFileIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { @@ -2677,9 +2770,9 @@ public final class AdminProtos { /** * repeated string store_file = 1; */ - public java.util.List + public com.google.protobuf.ProtocolStringList getStoreFileList() { - return java.util.Collections.unmodifiableList(storeFile_); + return storeFile_.getUnmodifiableView(); } /** * repeated string store_file = 1; @@ -2732,7 +2825,8 @@ public final class AdminProtos { public Builder addAllStoreFile( java.lang.Iterable values) { ensureStoreFileIsMutable(); - super.addAll(values, storeFile_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, storeFile_); onChanged(); return this; } @@ -2758,54 +2852,83 @@ public final class AdminProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + 
public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.GetStoreFileResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.GetStoreFileResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileResponse DEFAULT_INSTANCE; static { - defaultInstance = new GetStoreFileResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileResponse(); } - // @@protoc_insertion_point(class_scope:hbase.pb.GetStoreFileResponse) - } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } - public interface GetOnlineRegionRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GetStoreFileResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetStoreFileResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface GetOnlineRegionRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GetOnlineRegionRequest) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.GetOnlineRegionRequest} */ - public static final class 
GetOnlineRegionRequest extends - com.google.protobuf.GeneratedMessage - implements GetOnlineRegionRequestOrBuilder { + public static final class GetOnlineRegionRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetOnlineRegionRequest) + GetOnlineRegionRequestOrBuilder { // Use GetOnlineRegionRequest.newBuilder() to construct. - private GetOnlineRegionRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetOnlineRegionRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private GetOnlineRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final GetOnlineRegionRequest defaultInstance; - public static GetOnlineRegionRequest getDefaultInstance() { - return defaultInstance; } - - public GetOnlineRegionRequest getDefaultInstanceForType() { - return defaultInstance; + private GetOnlineRegionRequest() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private GetOnlineRegionRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -2829,7 +2952,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -2840,34 +2963,18 @@ public 
final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetOnlineRegionRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetOnlineRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetOnlineRegionRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetOnlineRegionRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -2875,29 +2982,21 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - 
memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -2908,12 +3007,10 @@ public final class AdminProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -2921,7 +3018,7 @@ public final class AdminProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -2949,46 +3046,57 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -2996,14 +3104,15 @@ public final class AdminProtos { * Protobuf type {@code hbase.pb.GetOnlineRegionRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetOnlineRegionRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetOnlineRegionRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetOnlineRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -3016,27 +3125,20 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { 
super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetOnlineRegionRequest_descriptor; @@ -3060,6 +3162,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest)other); @@ -3071,7 +3199,8 @@ public final class AdminProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -3088,7 +3217,7 @@ public final class 
AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -3096,22 +3225,59 @@ public final class AdminProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.GetOnlineRegionRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.GetOnlineRegionRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest DEFAULT_INSTANCE; static { - defaultInstance = new GetOnlineRegionRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GetOnlineRegionRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetOnlineRegionRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { 
+ return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.GetOnlineRegionRequest) } - public interface GetOnlineRegionResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface GetOnlineRegionResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GetOnlineRegionResponse) + com.google.protobuf.MessageOrBuilder { - // repeated .hbase.pb.RegionInfo region_info = 1; /** * repeated .hbase.pb.RegionInfo region_info = 1; */ @@ -3139,36 +3305,28 @@ public final class AdminProtos { /** * Protobuf type {@code hbase.pb.GetOnlineRegionResponse} */ - public static final class GetOnlineRegionResponse extends - com.google.protobuf.GeneratedMessage - implements GetOnlineRegionResponseOrBuilder { + public static final class GetOnlineRegionResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetOnlineRegionResponse) + GetOnlineRegionResponseOrBuilder { // Use GetOnlineRegionResponse.newBuilder() to construct. 
- private GetOnlineRegionResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetOnlineRegionResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private GetOnlineRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final GetOnlineRegionResponse defaultInstance; - public static GetOnlineRegionResponse getDefaultInstance() { - return defaultInstance; } - - public GetOnlineRegionResponse getDefaultInstanceForType() { - return defaultInstance; + private GetOnlineRegionResponse() { + regionInfo_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private GetOnlineRegionResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -3192,7 +3350,8 @@ public final class AdminProtos { regionInfo_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } - regionInfo_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry)); + regionInfo_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry)); break; } } @@ -3201,7 +3360,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { 
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_); @@ -3215,29 +3374,13 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetOnlineRegionResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetOnlineRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetOnlineRegionResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetOnlineRegionResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - // repeated .hbase.pb.RegionInfo region_info = 1; public static final int REGION_INFO_FIELD_NUMBER = 1; private java.util.List regionInfo_; /** @@ -3273,13 +3416,11 @@ public final class AdminProtos { return regionInfo_.get(index); } - private void initFields() { - regionInfo_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; for (int i = 0; i < getRegionInfoCount(); i++) { if 
(!getRegionInfo(i).isInitialized()) { @@ -3293,16 +3434,14 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < regionInfo_.size(); i++) { output.writeMessage(1, regionInfo_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -3310,19 +3449,13 @@ public final class AdminProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, regionInfo_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -3335,12 +3468,10 @@ public final class AdminProtos { boolean result = true; result = result && getRegionInfoList() .equals(other.getRegionInfoList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -3352,7 +3483,7 @@ public final class AdminProtos { hash = (37 * hash) + REGION_INFO_FIELD_NUMBER; hash = (53 * hash) + getRegionInfoList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -3380,46 +3511,57 @@ public final class AdminProtos { } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, 
extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -3427,14 +3569,15 @@ public final class AdminProtos { * Protobuf type {@code hbase.pb.GetOnlineRegionResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetOnlineRegionResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetOnlineRegionResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetOnlineRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -3447,19 +3590,16 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionInfoFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (regionInfoBuilder_ == null) { @@ -3471,10 +3611,6 @@ public final class AdminProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetOnlineRegionResponse_descriptor; @@ -3508,6 +3644,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object 
value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse)other); @@ -3538,21 +3700,21 @@ public final class AdminProtos { regionInfo_ = other.regionInfo_; bitField0_ = (bitField0_ & ~0x00000001); regionInfoBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getRegionInfoFieldBuilder() : null; } else { regionInfoBuilder_.addAllMessages(other.regionInfo_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getRegionInfoCount(); i++) { if (!getRegionInfo(i).isInitialized()) { - return false; } } @@ -3568,7 +3730,7 @@ public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -3578,7 +3740,6 @@ public final class AdminProtos { } private int bitField0_; - // repeated .hbase.pb.RegionInfo region_info = 1; private java.util.List regionInfo_ = java.util.Collections.emptyList(); private void ensureRegionInfoIsMutable() { @@ -3588,7 +3749,7 @@ public final class AdminProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_; /** @@ -3720,7 +3881,8 @@ public final class AdminProtos { java.lang.Iterable values) { if (regionInfoBuilder_ == null) { ensureRegionInfoIsMutable(); - super.addAll(values, regionInfo_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, regionInfo_); onChanged(); } else { regionInfoBuilder_.addAllMessages(values); @@ -3803,11 +3965,11 @@ public final class AdminProtos { getRegionInfoBuilderList() { return getRegionInfoFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> getRegionInfoFieldBuilder() { if (regionInfoBuilder_ == null) { - regionInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + regionInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( regionInfo_, ((bitField0_ & 0x00000001) == 0x00000001), @@ -3817,22 +3979,59 @@ public final class AdminProtos { } return regionInfoBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.GetOnlineRegionResponse) } + // 
@@protoc_insertion_point(class_scope:hbase.pb.GetOnlineRegionResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse DEFAULT_INSTANCE; static { - defaultInstance = new GetOnlineRegionResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GetOnlineRegionResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetOnlineRegionResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.GetOnlineRegionResponse) } - public interface OpenRegionRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface OpenRegionRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.OpenRegionRequest) + com.google.protobuf.MessageOrBuilder { - // repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1; /** * repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1; */ @@ -3857,75 +4056,67 @@ public final class AdminProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder 
getOpenInfoOrBuilder( int index); - // optional uint64 serverStartCode = 2; /** - * optional uint64 serverStartCode = 2; - * *
      * the intended server for this RPC.
      * 
+ * + * optional uint64 serverStartCode = 2; */ boolean hasServerStartCode(); /** - * optional uint64 serverStartCode = 2; - * *
      * the intended server for this RPC.
      * 
+ * + * optional uint64 serverStartCode = 2; */ long getServerStartCode(); - // optional uint64 master_system_time = 5; /** - * optional uint64 master_system_time = 5; - * *
      * wall clock time from master
      * 
+ * + * optional uint64 master_system_time = 5; */ boolean hasMasterSystemTime(); /** - * optional uint64 master_system_time = 5; - * *
      * wall clock time from master
      * 
+ * + * optional uint64 master_system_time = 5; */ long getMasterSystemTime(); } /** * Protobuf type {@code hbase.pb.OpenRegionRequest} */ - public static final class OpenRegionRequest extends - com.google.protobuf.GeneratedMessage - implements OpenRegionRequestOrBuilder { + public static final class OpenRegionRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.OpenRegionRequest) + OpenRegionRequestOrBuilder { // Use OpenRegionRequest.newBuilder() to construct. - private OpenRegionRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private OpenRegionRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private OpenRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final OpenRegionRequest defaultInstance; - public static OpenRegionRequest getDefaultInstance() { - return defaultInstance; } - - public OpenRegionRequest getDefaultInstanceForType() { - return defaultInstance; + private OpenRegionRequest() { + openInfo_ = java.util.Collections.emptyList(); + serverStartCode_ = 0L; + masterSystemTime_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private OpenRegionRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -3949,7 +4140,8 @@ public final class AdminProtos { openInfo_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } - 
openInfo_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.PARSER, extensionRegistry)); + openInfo_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.PARSER, extensionRegistry)); break; } case 16: { @@ -3968,7 +4160,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { openInfo_ = java.util.Collections.unmodifiableList(openInfo_); @@ -3982,32 +4174,17 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public OpenRegionRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new OpenRegionRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public interface RegionOpenInfoOrBuilder - extends 
com.google.protobuf.MessageOrBuilder { + public interface RegionOpenInfoOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.OpenRegionRequest.RegionOpenInfo) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionInfo region = 1; /** * required .hbase.pb.RegionInfo region = 1; */ @@ -4021,7 +4198,6 @@ public final class AdminProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionOrBuilder(); - // optional uint32 version_of_offline_node = 2; /** * optional uint32 version_of_offline_node = 2; */ @@ -4031,7 +4207,6 @@ public final class AdminProtos { */ int getVersionOfOfflineNode(); - // repeated .hbase.pb.ServerName favored_nodes = 3; /** * repeated .hbase.pb.ServerName favored_nodes = 3; */ @@ -4056,57 +4231,50 @@ public final class AdminProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getFavoredNodesOrBuilder( int index); - // optional bool openForDistributedLogReplay = 4; /** - * optional bool openForDistributedLogReplay = 4; - * *
        * open region for distributedLogReplay
        * 
+ * + * optional bool openForDistributedLogReplay = 4; */ boolean hasOpenForDistributedLogReplay(); /** - * optional bool openForDistributedLogReplay = 4; - * *
        * open region for distributedLogReplay
        * 
+ * + * optional bool openForDistributedLogReplay = 4; */ boolean getOpenForDistributedLogReplay(); } /** * Protobuf type {@code hbase.pb.OpenRegionRequest.RegionOpenInfo} */ - public static final class RegionOpenInfo extends - com.google.protobuf.GeneratedMessage - implements RegionOpenInfoOrBuilder { + public static final class RegionOpenInfo extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.OpenRegionRequest.RegionOpenInfo) + RegionOpenInfoOrBuilder { // Use RegionOpenInfo.newBuilder() to construct. - private RegionOpenInfo(com.google.protobuf.GeneratedMessage.Builder builder) { + private RegionOpenInfo(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private RegionOpenInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RegionOpenInfo defaultInstance; - public static RegionOpenInfo getDefaultInstance() { - return defaultInstance; } - - public RegionOpenInfo getDefaultInstanceForType() { - return defaultInstance; + private RegionOpenInfo() { + versionOfOfflineNode_ = 0; + favoredNodes_ = java.util.Collections.emptyList(); + openForDistributedLogReplay_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RegionOpenInfo( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -4148,7 +4316,8 @@ public final class AdminProtos { favoredNodes_ = new java.util.ArrayList(); mutable_bitField0_ |= 
0x00000004; } - favoredNodes_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry)); + favoredNodes_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry)); break; } case 32: { @@ -4162,7 +4331,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { favoredNodes_ = java.util.Collections.unmodifiableList(favoredNodes_); @@ -4176,30 +4345,14 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionRequest_RegionOpenInfo_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionRequest_RegionOpenInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RegionOpenInfo parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RegionOpenInfo(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // 
required .hbase.pb.RegionInfo region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo region_; /** @@ -4212,16 +4365,15 @@ public final class AdminProtos { * required .hbase.pb.RegionInfo region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegion() { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : region_; } /** * required .hbase.pb.RegionInfo region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionOrBuilder() { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : region_; } - // optional uint32 version_of_offline_node = 2; public static final int VERSION_OF_OFFLINE_NODE_FIELD_NUMBER = 2; private int versionOfOfflineNode_; /** @@ -4237,7 +4389,6 @@ public final class AdminProtos { return versionOfOfflineNode_; } - // repeated .hbase.pb.ServerName favored_nodes = 3; public static final int FAVORED_NODES_FIELD_NUMBER = 3; private java.util.List favoredNodes_; /** @@ -4273,40 +4424,34 @@ public final class AdminProtos { return favoredNodes_.get(index); } - // optional bool openForDistributedLogReplay = 4; public static final int OPENFORDISTRIBUTEDLOGREPLAY_FIELD_NUMBER = 4; private boolean openForDistributedLogReplay_; /** - * optional bool openForDistributedLogReplay = 4; - * *
        * open region for distributedLogReplay
        * 
+ * + * optional bool openForDistributedLogReplay = 4; */ public boolean hasOpenForDistributedLogReplay() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * optional bool openForDistributedLogReplay = 4; - * *
        * open region for distributedLogReplay
        * 
+ * + * optional bool openForDistributedLogReplay = 4; */ public boolean getOpenForDistributedLogReplay() { return openForDistributedLogReplay_; } - private void initFields() { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - versionOfOfflineNode_ = 0; - favoredNodes_ = java.util.Collections.emptyList(); - openForDistributedLogReplay_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRegion()) { memoizedIsInitialized = 0; @@ -4328,9 +4473,8 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); + output.writeMessage(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt32(2, versionOfOfflineNode_); @@ -4341,18 +4485,17 @@ public final class AdminProtos { if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBool(4, openForDistributedLogReplay_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); + .computeMessageSize(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream @@ -4366,19 +4509,13 @@ public final class AdminProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(4, openForDistributedLogReplay_); } - size += getUnknownFields().getSerializedSize(); - 
memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -4406,12 +4543,10 @@ public final class AdminProtos { result = result && (getOpenForDistributedLogReplay() == other.getOpenForDistributedLogReplay()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -4433,9 +4568,10 @@ public final class AdminProtos { } if (hasOpenForDistributedLogReplay()) { hash = (37 * hash) + OPENFORDISTRIBUTEDLOGREPLAY_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getOpenForDistributedLogReplay()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getOpenForDistributedLogReplay()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -4463,46 +4599,57 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo prototype) { - return newBuilder().mergeFrom(prototype); + return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -4510,14 +4657,15 @@ public final class AdminProtos { * Protobuf type {@code hbase.pb.OpenRegionRequest.RegionOpenInfo} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.OpenRegionRequest.RegionOpenInfo) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionRequest_RegionOpenInfo_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionRequest_RegionOpenInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -4530,24 +4678,21 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if 
(com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionFieldBuilder(); getFavoredNodesFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + region_ = null; } else { regionBuilder_.clear(); } @@ -4565,10 +4710,6 @@ public final class AdminProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionRequest_RegionOpenInfo_descriptor; @@ -4620,6 +4761,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo) { return 
mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo)other); @@ -4656,7 +4823,7 @@ public final class AdminProtos { favoredNodes_ = other.favoredNodes_; bitField0_ = (bitField0_ & ~0x00000004); favoredNodesBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getFavoredNodesFieldBuilder() : null; } else { favoredNodesBuilder_.addAllMessages(other.favoredNodes_); @@ -4666,22 +4833,20 @@ public final class AdminProtos { if (other.hasOpenForDistributedLogReplay()) { setOpenForDistributedLogReplay(other.getOpenForDistributedLogReplay()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasRegion()) { - return false; } if (!getRegion().isInitialized()) { - return false; } for (int i = 0; i < getFavoredNodesCount(); i++) { if (!getFavoredNodes(i).isInitialized()) { - return false; } } @@ -4697,7 +4862,7 @@ public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -4707,9 +4872,8 @@ public final class AdminProtos { } private int bitField0_; - // required .hbase.pb.RegionInfo region = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo region_ = null; + private 
com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionBuilder_; /** * required .hbase.pb.RegionInfo region = 1; @@ -4722,7 +4886,7 @@ public final class AdminProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegion() { if (regionBuilder_ == null) { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } @@ -4763,6 +4927,7 @@ public final class AdminProtos { public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != null && region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(region_).mergeFrom(value).buildPartial(); @@ -4781,7 +4946,7 @@ public final class AdminProtos { */ public Builder clearRegion() { if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + region_ = null; onChanged(); } else { regionBuilder_.clear(); @@ -4804,19 +4969,20 @@ public final class AdminProtos { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { - return region_; + return region_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : region_; } } /** * required .hbase.pb.RegionInfo region = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( - region_, + getRegion(), getParentForChildren(), isClean()); region_ = null; @@ -4824,7 +4990,6 @@ public final class AdminProtos { return regionBuilder_; } - // optional uint32 version_of_offline_node = 2; private int versionOfOfflineNode_ ; /** * optional uint32 version_of_offline_node = 2; @@ -4857,7 +5022,6 @@ public final class AdminProtos { return this; } - // repeated .hbase.pb.ServerName favored_nodes = 3; private java.util.List favoredNodes_ = java.util.Collections.emptyList(); private void ensureFavoredNodesIsMutable() { @@ -4867,7 +5031,7 @@ public final class AdminProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> favoredNodesBuilder_; /** @@ -4999,7 +5163,8 @@ public final class AdminProtos { java.lang.Iterable values) { if 
(favoredNodesBuilder_ == null) { ensureFavoredNodesIsMutable(); - super.addAll(values, favoredNodes_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, favoredNodes_); onChanged(); } else { favoredNodesBuilder_.addAllMessages(values); @@ -5082,11 +5247,11 @@ public final class AdminProtos { getFavoredNodesBuilderList() { return getFavoredNodesFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getFavoredNodesFieldBuilder() { if (favoredNodesBuilder_ == null) { - favoredNodesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + favoredNodesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( favoredNodes_, ((bitField0_ & 0x00000004) == 0x00000004), @@ -5097,34 +5262,33 @@ public final class AdminProtos { return favoredNodesBuilder_; } - // optional bool openForDistributedLogReplay = 4; private boolean openForDistributedLogReplay_ ; /** - * optional bool openForDistributedLogReplay = 4; - * *
          * open region for distributedLogReplay
          * 
+ * + * optional bool openForDistributedLogReplay = 4; */ public boolean hasOpenForDistributedLogReplay() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** - * optional bool openForDistributedLogReplay = 4; - * *
          * open region for distributedLogReplay
          * 
+ * + * optional bool openForDistributedLogReplay = 4; */ public boolean getOpenForDistributedLogReplay() { return openForDistributedLogReplay_; } /** - * optional bool openForDistributedLogReplay = 4; - * *
          * open region for distributedLogReplay
          * 
+ * + * optional bool openForDistributedLogReplay = 4; */ public Builder setOpenForDistributedLogReplay(boolean value) { bitField0_ |= 0x00000008; @@ -5133,11 +5297,11 @@ public final class AdminProtos { return this; } /** - * optional bool openForDistributedLogReplay = 4; - * *
          * open region for distributedLogReplay
          * 
+ * + * optional bool openForDistributedLogReplay = 4; */ public Builder clearOpenForDistributedLogReplay() { bitField0_ = (bitField0_ & ~0x00000008); @@ -5145,20 +5309,56 @@ public final class AdminProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.OpenRegionRequest.RegionOpenInfo) } + // @@protoc_insertion_point(class_scope:hbase.pb.OpenRegionRequest.RegionOpenInfo) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo DEFAULT_INSTANCE; static { - defaultInstance = new RegionOpenInfo(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo(); } - // @@protoc_insertion_point(class_scope:hbase.pb.OpenRegionRequest.RegionOpenInfo) - } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo getDefaultInstance() { + return DEFAULT_INSTANCE; + } - private int bitField0_; - // repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1; + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RegionOpenInfo parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionOpenInfo(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return 
PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + private int bitField0_; public static final int OPEN_INFO_FIELD_NUMBER = 1; private java.util.List openInfo_; /** @@ -5194,63 +5394,57 @@ public final class AdminProtos { return openInfo_.get(index); } - // optional uint64 serverStartCode = 2; public static final int SERVERSTARTCODE_FIELD_NUMBER = 2; private long serverStartCode_; /** - * optional uint64 serverStartCode = 2; - * *
      * the intended server for this RPC.
      * 
+ * + * optional uint64 serverStartCode = 2; */ public boolean hasServerStartCode() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional uint64 serverStartCode = 2; - * *
      * the intended server for this RPC.
      * 
+ * + * optional uint64 serverStartCode = 2; */ public long getServerStartCode() { return serverStartCode_; } - // optional uint64 master_system_time = 5; public static final int MASTER_SYSTEM_TIME_FIELD_NUMBER = 5; private long masterSystemTime_; /** - * optional uint64 master_system_time = 5; - * *
      * wall clock time from master
      * 
+ * + * optional uint64 master_system_time = 5; */ public boolean hasMasterSystemTime() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional uint64 master_system_time = 5; - * *
      * wall clock time from master
      * 
+ * + * optional uint64 master_system_time = 5; */ public long getMasterSystemTime() { return masterSystemTime_; } - private void initFields() { - openInfo_ = java.util.Collections.emptyList(); - serverStartCode_ = 0L; - masterSystemTime_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; for (int i = 0; i < getOpenInfoCount(); i++) { if (!getOpenInfo(i).isInitialized()) { @@ -5264,7 +5458,6 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < openInfo_.size(); i++) { output.writeMessage(1, openInfo_.get(i)); } @@ -5274,12 +5467,11 @@ public final class AdminProtos { if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt64(5, masterSystemTime_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -5295,19 +5487,13 @@ public final class AdminProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(5, masterSystemTime_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -5330,12 +5516,10 @@ public final class AdminProtos { result = result && (getMasterSystemTime() == other.getMasterSystemTime()); } 
- result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -5349,13 +5533,15 @@ public final class AdminProtos { } if (hasServerStartCode()) { hash = (37 * hash) + SERVERSTARTCODE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getServerStartCode()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getServerStartCode()); } if (hasMasterSystemTime()) { hash = (37 * hash) + MASTER_SYSTEM_TIME_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getMasterSystemTime()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getMasterSystemTime()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -5383,46 +5569,57 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -5430,14 +5627,15 @@ public final class AdminProtos { * Protobuf type {@code hbase.pb.OpenRegionRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.OpenRegionRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -5450,19 +5648,16 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getOpenInfoFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { 
super.clear(); if (openInfoBuilder_ == null) { @@ -5478,10 +5673,6 @@ public final class AdminProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionRequest_descriptor; @@ -5525,6 +5716,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest)other); @@ -5555,7 +5772,7 @@ public final class AdminProtos { openInfo_ = other.openInfo_; bitField0_ = (bitField0_ & ~0x00000001); openInfoBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getOpenInfoFieldBuilder() : null; } else { openInfoBuilder_.addAllMessages(other.openInfo_); @@ -5568,14 +5785,14 @@ public final class AdminProtos { if (other.hasMasterSystemTime()) { setMasterSystemTime(other.getMasterSystemTime()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getOpenInfoCount(); i++) { if (!getOpenInfo(i).isInitialized()) { - return false; } } @@ -5591,7 +5808,7 @@ public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -5601,7 +5818,6 @@ public final class AdminProtos { } private int bitField0_; - // repeated .hbase.pb.OpenRegionRequest.RegionOpenInfo open_info = 1; private java.util.List openInfo_ = java.util.Collections.emptyList(); private void ensureOpenInfoIsMutable() { @@ -5611,7 +5827,7 @@ public final class AdminProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder> openInfoBuilder_; /** @@ -5743,7 +5959,8 @@ public final class AdminProtos { java.lang.Iterable values) { if (openInfoBuilder_ == null) { ensureOpenInfoIsMutable(); - super.addAll(values, openInfo_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, openInfo_); onChanged(); } else { 
openInfoBuilder_.addAllMessages(values); @@ -5826,11 +6043,11 @@ public final class AdminProtos { getOpenInfoBuilderList() { return getOpenInfoFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder> getOpenInfoFieldBuilder() { if (openInfoBuilder_ == null) { - openInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + openInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder>( openInfo_, ((bitField0_ & 0x00000001) == 0x00000001), @@ -5841,34 +6058,33 @@ public final class AdminProtos { return openInfoBuilder_; } - // optional uint64 serverStartCode = 2; private long serverStartCode_ ; /** - * optional uint64 serverStartCode = 2; - * *
        * the intended server for this RPC.
        * 
+ * + * optional uint64 serverStartCode = 2; */ public boolean hasServerStartCode() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional uint64 serverStartCode = 2; - * *
        * the intended server for this RPC.
        * 
+ * + * optional uint64 serverStartCode = 2; */ public long getServerStartCode() { return serverStartCode_; } /** - * optional uint64 serverStartCode = 2; - * *
        * the intended server for this RPC.
        * 
+ * + * optional uint64 serverStartCode = 2; */ public Builder setServerStartCode(long value) { bitField0_ |= 0x00000002; @@ -5877,11 +6093,11 @@ public final class AdminProtos { return this; } /** - * optional uint64 serverStartCode = 2; - * *
        * the intended server for this RPC.
        * 
+ * + * optional uint64 serverStartCode = 2; */ public Builder clearServerStartCode() { bitField0_ = (bitField0_ & ~0x00000002); @@ -5890,34 +6106,33 @@ public final class AdminProtos { return this; } - // optional uint64 master_system_time = 5; private long masterSystemTime_ ; /** - * optional uint64 master_system_time = 5; - * *
        * wall clock time from master
        * 
+ * + * optional uint64 master_system_time = 5; */ public boolean hasMasterSystemTime() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * optional uint64 master_system_time = 5; - * *
        * wall clock time from master
        * 
+ * + * optional uint64 master_system_time = 5; */ public long getMasterSystemTime() { return masterSystemTime_; } /** - * optional uint64 master_system_time = 5; - * *
        * wall clock time from master
        * 
+ * + * optional uint64 master_system_time = 5; */ public Builder setMasterSystemTime(long value) { bitField0_ |= 0x00000004; @@ -5926,11 +6141,11 @@ public final class AdminProtos { return this; } /** - * optional uint64 master_system_time = 5; - * *
        * wall clock time from master
        * 
+ * + * optional uint64 master_system_time = 5; */ public Builder clearMasterSystemTime() { bitField0_ = (bitField0_ & ~0x00000004); @@ -5938,22 +6153,59 @@ public final class AdminProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.OpenRegionRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.OpenRegionRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest DEFAULT_INSTANCE; static { - defaultInstance = new OpenRegionRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public OpenRegionRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new OpenRegionRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.OpenRegionRequest) } - public interface 
OpenRegionResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface OpenRegionResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.OpenRegionResponse) + com.google.protobuf.MessageOrBuilder { - // repeated .hbase.pb.OpenRegionResponse.RegionOpeningState opening_state = 1; /** * repeated .hbase.pb.OpenRegionResponse.RegionOpeningState opening_state = 1; */ @@ -5970,36 +6222,28 @@ public final class AdminProtos { /** * Protobuf type {@code hbase.pb.OpenRegionResponse} */ - public static final class OpenRegionResponse extends - com.google.protobuf.GeneratedMessage - implements OpenRegionResponseOrBuilder { + public static final class OpenRegionResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.OpenRegionResponse) + OpenRegionResponseOrBuilder { // Use OpenRegionResponse.newBuilder() to construct. - private OpenRegionResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private OpenRegionResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private OpenRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final OpenRegionResponse defaultInstance; - public static OpenRegionResponse getDefaultInstance() { - return defaultInstance; } - - public OpenRegionResponse getDefaultInstanceForType() { - return defaultInstance; + private OpenRegionResponse() { + openingState_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private OpenRegionResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -6025,10 +6269,10 @@ public final class AdminProtos { unknownFields.mergeVarintField(1, rawValue); } else { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - openingState_ = new java.util.ArrayList(); + openingState_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } - openingState_.add(value); + openingState_.add(rawValue); } break; } @@ -6042,10 +6286,10 @@ public final class AdminProtos { unknownFields.mergeVarintField(1, rawValue); } else { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - openingState_ = new java.util.ArrayList(); + openingState_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } - openingState_.add(value); + openingState_.add(rawValue); } } input.popLimit(oldLimit); @@ -6057,7 +6301,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { openingState_ = java.util.Collections.unmodifiableList(openingState_); @@ -6071,28 +6315,13 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse.class, 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public OpenRegionResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new OpenRegionResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - /** * Protobuf enum {@code hbase.pb.OpenRegionResponse.RegionOpeningState} */ @@ -6101,15 +6330,15 @@ public final class AdminProtos { /** * OPENED = 0; */ - OPENED(0, 0), + OPENED(0), /** * ALREADY_OPENED = 1; */ - ALREADY_OPENED(1, 1), + ALREADY_OPENED(1), /** * FAILED_OPENING = 2; */ - FAILED_OPENING(2, 2), + FAILED_OPENING(2), ; /** @@ -6126,9 +6355,19 @@ public final class AdminProtos { public static final int FAILED_OPENING_VALUE = 2; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. 
+ */ + @java.lang.Deprecated public static RegionOpeningState valueOf(int value) { + return forNumber(value); + } + + public static RegionOpeningState forNumber(int value) { switch (value) { case 0: return OPENED; case 1: return ALREADY_OPENED; @@ -6141,17 +6380,17 @@ public final class AdminProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + RegionOpeningState> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public RegionOpeningState findValueByNumber(int number) { - return RegionOpeningState.valueOf(number); + return RegionOpeningState.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -6173,25 +6412,32 @@ public final class AdminProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private RegionOpeningState(int index, int value) { - this.index = index; + private RegionOpeningState(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.OpenRegionResponse.RegionOpeningState) } - // repeated .hbase.pb.OpenRegionResponse.RegionOpeningState opening_state = 1; public static final int OPENING_STATE_FIELD_NUMBER = 1; - private java.util.List openingState_; + private java.util.List openingState_; + private static final com.google.protobuf.Internal.ListAdapter.Converter< + java.lang.Integer, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> openingState_converter_ = + new com.google.protobuf.Internal.ListAdapter.Converter< + java.lang.Integer, 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState>() { + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState convert(java.lang.Integer from) { + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState result = org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState.valueOf(from); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState.OPENED : result; + } + }; /** * repeated .hbase.pb.OpenRegionResponse.RegionOpeningState opening_state = 1; */ public java.util.List getOpeningStateList() { - return openingState_; + return new com.google.protobuf.Internal.ListAdapter< + java.lang.Integer, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState>(openingState_, openingState_converter_); } /** * repeated .hbase.pb.OpenRegionResponse.RegionOpeningState opening_state = 1; @@ -6203,16 +6449,14 @@ public final class AdminProtos { * repeated .hbase.pb.OpenRegionResponse.RegionOpeningState opening_state = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index) { - return openingState_.get(index); + return openingState_converter_.convert(openingState_.get(index)); } - private void initFields() { - openingState_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -6220,16 +6464,14 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - 
getSerializedSize(); for (int i = 0; i < openingState_.size(); i++) { - output.writeEnum(1, openingState_.get(i).getNumber()); + output.writeEnum(1, openingState_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -6237,24 +6479,18 @@ public final class AdminProtos { int dataSize = 0; for (int i = 0; i < openingState_.size(); i++) { dataSize += com.google.protobuf.CodedOutputStream - .computeEnumSizeNoTag(openingState_.get(i).getNumber()); + .computeEnumSizeNoTag(openingState_.get(i)); } size += dataSize; size += 1 * openingState_.size(); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -6265,14 +6501,11 @@ public final class AdminProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse) obj; boolean result = true; - result = result && getOpeningStateList() - .equals(other.getOpeningStateList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && openingState_.equals(other.openingState_); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -6282,9 +6515,9 @@ public final class AdminProtos { hash = (19 * hash) + 
getDescriptorForType().hashCode(); if (getOpeningStateCount() > 0) { hash = (37 * hash) + OPENING_STATE_FIELD_NUMBER; - hash = (53 * hash) + hashEnumList(getOpeningStateList()); + hash = (53 * hash) + openingState_.hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -6312,46 +6545,57 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws 
java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -6359,14 +6603,15 @@ public final class AdminProtos { * Protobuf type {@code hbase.pb.OpenRegionResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.OpenRegionResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -6379,18 +6624,15 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); 
openingState_ = java.util.Collections.emptyList(); @@ -6398,10 +6640,6 @@ public final class AdminProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_OpenRegionResponse_descriptor; @@ -6431,6 +6669,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse)other); @@ -6452,7 +6716,8 @@ public final class AdminProtos { } onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -6469,7 +6734,7 @@ public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { 
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -6479,12 +6744,11 @@ public final class AdminProtos { } private int bitField0_; - // repeated .hbase.pb.OpenRegionResponse.RegionOpeningState opening_state = 1; - private java.util.List openingState_ = + private java.util.List openingState_ = java.util.Collections.emptyList(); private void ensureOpeningStateIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { - openingState_ = new java.util.ArrayList(openingState_); + openingState_ = new java.util.ArrayList(openingState_); bitField0_ |= 0x00000001; } } @@ -6492,7 +6756,8 @@ public final class AdminProtos { * repeated .hbase.pb.OpenRegionResponse.RegionOpeningState opening_state = 1; */ public java.util.List getOpeningStateList() { - return java.util.Collections.unmodifiableList(openingState_); + return new com.google.protobuf.Internal.ListAdapter< + java.lang.Integer, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState>(openingState_, openingState_converter_); } /** * repeated .hbase.pb.OpenRegionResponse.RegionOpeningState opening_state = 1; @@ -6504,7 +6769,7 @@ public final class AdminProtos { * repeated .hbase.pb.OpenRegionResponse.RegionOpeningState opening_state = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index) { - return openingState_.get(index); + return openingState_converter_.convert(openingState_.get(index)); } /** * repeated .hbase.pb.OpenRegionResponse.RegionOpeningState opening_state = 1; @@ -6515,7 +6780,7 @@ public final class AdminProtos { throw new NullPointerException(); } ensureOpeningStateIsMutable(); - openingState_.set(index, value); + openingState_.set(index, value.getNumber()); onChanged(); return this; } @@ 
-6527,7 +6792,7 @@ public final class AdminProtos { throw new NullPointerException(); } ensureOpeningStateIsMutable(); - openingState_.add(value); + openingState_.add(value.getNumber()); onChanged(); return this; } @@ -6537,7 +6802,9 @@ public final class AdminProtos { public Builder addAllOpeningState( java.lang.Iterable values) { ensureOpeningStateIsMutable(); - super.addAll(values, openingState_); + for (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value : values) { + openingState_.add(value.getNumber()); + } onChanged(); return this; } @@ -6550,22 +6817,59 @@ public final class AdminProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.OpenRegionResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.OpenRegionResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse DEFAULT_INSTANCE; static { - defaultInstance = new OpenRegionResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public OpenRegionResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new 
OpenRegionResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.OpenRegionResponse) } - public interface WarmupRegionRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface WarmupRegionRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.WarmupRegionRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionInfo regionInfo = 1; /** * required .hbase.pb.RegionInfo regionInfo = 1; */ @@ -6582,36 +6886,27 @@ public final class AdminProtos { /** * Protobuf type {@code hbase.pb.WarmupRegionRequest} */ - public static final class WarmupRegionRequest extends - com.google.protobuf.GeneratedMessage - implements WarmupRegionRequestOrBuilder { + public static final class WarmupRegionRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.WarmupRegionRequest) + WarmupRegionRequestOrBuilder { // Use WarmupRegionRequest.newBuilder() to construct. 
- private WarmupRegionRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private WarmupRegionRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private WarmupRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final WarmupRegionRequest defaultInstance; - public static WarmupRegionRequest getDefaultInstance() { - return defaultInstance; - } - - public WarmupRegionRequest getDefaultInstanceForType() { - return defaultInstance; + private WarmupRegionRequest() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private WarmupRegionRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -6649,7 +6944,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -6660,30 +6955,14 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_WarmupRegionRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_WarmupRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public WarmupRegionRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new WarmupRegionRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.RegionInfo regionInfo = 1; public static final int REGIONINFO_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo regionInfo_; /** @@ -6696,22 +6975,20 @@ public final class AdminProtos { * required .hbase.pb.RegionInfo regionInfo = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { - return regionInfo_; + return regionInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : regionInfo_; } /** * required .hbase.pb.RegionInfo regionInfo = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() { - return regionInfo_; + return regionInfo_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : regionInfo_; } - private void initFields() { - regionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRegionInfo()) { memoizedIsInitialized = 0; @@ -6727,36 +7004,28 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, regionInfo_); + output.writeMessage(1, getRegionInfo()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, regionInfo_); + .computeMessageSize(1, getRegionInfo()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -6772,12 +7041,10 @@ public final class AdminProtos { result = result && getRegionInfo() .equals(other.getRegionInfo()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && 
unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -6789,7 +7056,7 @@ public final class AdminProtos { hash = (37 * hash) + REGIONINFO_FIELD_NUMBER; hash = (53 * hash) + getRegionInfo().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -6817,46 +7084,57 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -6864,14 +7142,15 @@ public final class AdminProtos { * Protobuf type {@code hbase.pb.WarmupRegionRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.WarmupRegionRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_WarmupRegionRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_WarmupRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -6884,23 +7163,20 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionInfoFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public 
Builder clear() { super.clear(); if (regionInfoBuilder_ == null) { - regionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + regionInfo_ = null; } else { regionInfoBuilder_.clear(); } @@ -6908,10 +7184,6 @@ public final class AdminProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_WarmupRegionRequest_descriptor; @@ -6946,6 +7218,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionRequest)other); @@ -6960,17 +7258,16 @@ public final class AdminProtos { if (other.hasRegionInfo()) { mergeRegionInfo(other.getRegionInfo()); } - this.mergeUnknownFields(other.getUnknownFields()); + 
this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasRegionInfo()) { - return false; } if (!getRegionInfo().isInitialized()) { - return false; } return true; @@ -6985,7 +7282,7 @@ public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -6995,9 +7292,8 @@ public final class AdminProtos { } private int bitField0_; - // required .hbase.pb.RegionInfo regionInfo = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_; /** * required .hbase.pb.RegionInfo regionInfo = 1; @@ -7010,7 +7306,7 @@ public final class AdminProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { if (regionInfoBuilder_ == null) { - return regionInfo_; + return regionInfo_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : regionInfo_; } else { return regionInfoBuilder_.getMessage(); } @@ -7051,6 +7347,7 @@ public final class AdminProtos { public Builder mergeRegionInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo value) { if (regionInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + regionInfo_ != null && regionInfo_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) { regionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(regionInfo_).mergeFrom(value).buildPartial(); @@ -7069,7 +7366,7 @@ public final class AdminProtos { */ public Builder clearRegionInfo() { if (regionInfoBuilder_ == null) { - regionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + regionInfo_ = null; onChanged(); } else { regionInfoBuilder_.clear(); @@ -7092,73 +7389,103 @@ public final class AdminProtos { if (regionInfoBuilder_ != null) { return regionInfoBuilder_.getMessageOrBuilder(); } else { - return regionInfo_; + return regionInfo_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : regionInfo_; } } /** * required .hbase.pb.RegionInfo regionInfo = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> getRegionInfoFieldBuilder() { if (regionInfoBuilder_ == null) { - regionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( - regionInfo_, + getRegionInfo(), getParentForChildren(), isClean()); regionInfo_ = null; } return regionInfoBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.WarmupRegionRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.WarmupRegionRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionRequest DEFAULT_INSTANCE; static { - defaultInstance = new WarmupRegionRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionRequest getDefaultInstance() { + return 
DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public WarmupRegionRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new WarmupRegionRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.WarmupRegionRequest) } - public interface WarmupRegionResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface WarmupRegionResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.WarmupRegionResponse) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.WarmupRegionResponse} */ - public static final class WarmupRegionResponse extends - com.google.protobuf.GeneratedMessage - implements WarmupRegionResponseOrBuilder { + public static final class WarmupRegionResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.WarmupRegionResponse) + WarmupRegionResponseOrBuilder { // Use WarmupRegionResponse.newBuilder() to construct. 
- private WarmupRegionResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private WarmupRegionResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private WarmupRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final WarmupRegionResponse defaultInstance; - public static WarmupRegionResponse getDefaultInstance() { - return defaultInstance; - } - - public WarmupRegionResponse getDefaultInstanceForType() { - return defaultInstance; + private WarmupRegionResponse() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private WarmupRegionResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -7182,7 +7509,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -7193,34 +7520,18 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_WarmupRegionResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_WarmupRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public WarmupRegionResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new WarmupRegionResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -7228,29 +7539,21 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean 
equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -7261,12 +7564,10 @@ public final class AdminProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionResponse) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -7274,7 +7575,7 @@ public final class AdminProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -7302,46 +7603,57 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -7349,14 +7661,15 @@ public final class AdminProtos { * Protobuf type {@code hbase.pb.WarmupRegionResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.WarmupRegionResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_WarmupRegionResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_WarmupRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -7369,27 +7682,20 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { 
super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_WarmupRegionResponse_descriptor; @@ -7413,6 +7719,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionResponse)other); @@ -7424,7 +7756,8 @@ public final class AdminProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionResponse other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -7441,7 +7774,7 @@ public final class AdminProtos 
{ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -7449,22 +7782,59 @@ public final class AdminProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.WarmupRegionResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.WarmupRegionResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionResponse DEFAULT_INSTANCE; static { - defaultInstance = new WarmupRegionResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public WarmupRegionResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new WarmupRegionResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + 
public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.WarmupRegionResponse) } - public interface CloseRegionRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface CloseRegionRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.CloseRegionRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionSpecifier region = 1; /** * required .hbase.pb.RegionSpecifier region = 1; */ @@ -7478,7 +7848,6 @@ public final class AdminProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - // optional uint32 version_of_closing_node = 2; /** * optional uint32 version_of_closing_node = 2; */ @@ -7488,7 +7857,6 @@ public final class AdminProtos { */ int getVersionOfClosingNode(); - // optional bool transition_in_ZK = 3 [default = true]; /** * optional bool transition_in_ZK = 3 [default = true]; */ @@ -7498,7 +7866,6 @@ public final class AdminProtos { */ boolean getTransitionInZK(); - // optional .hbase.pb.ServerName destination_server = 4; /** * optional .hbase.pb.ServerName destination_server = 4; */ @@ -7512,63 +7879,56 @@ public final class AdminProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestinationServerOrBuilder(); - // optional uint64 serverStartCode = 5; /** - * optional uint64 serverStartCode = 5; - * *
      * the intended server for this RPC.
      * 
+ * + * optional uint64 serverStartCode = 5; */ boolean hasServerStartCode(); /** - * optional uint64 serverStartCode = 5; - * *
      * the intended server for this RPC.
      * 
+ * + * optional uint64 serverStartCode = 5; */ long getServerStartCode(); } /** - * Protobuf type {@code hbase.pb.CloseRegionRequest} - * *
    **
    * Closes the specified region and will use or not use ZK during the close
    * according to the specified flag.
    * 
+ * + * Protobuf type {@code hbase.pb.CloseRegionRequest} */ - public static final class CloseRegionRequest extends - com.google.protobuf.GeneratedMessage - implements CloseRegionRequestOrBuilder { + public static final class CloseRegionRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.CloseRegionRequest) + CloseRegionRequestOrBuilder { // Use CloseRegionRequest.newBuilder() to construct. - private CloseRegionRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private CloseRegionRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private CloseRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final CloseRegionRequest defaultInstance; - public static CloseRegionRequest getDefaultInstance() { - return defaultInstance; } - - public CloseRegionRequest getDefaultInstanceForType() { - return defaultInstance; + private CloseRegionRequest() { + versionOfClosingNode_ = 0; + transitionInZK_ = true; + serverStartCode_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private CloseRegionRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -7634,7 +7994,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + 
e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -7645,30 +8005,14 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_CloseRegionRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_CloseRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CloseRegionRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CloseRegionRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** @@ -7681,16 +8025,15 @@ public final class AdminProtos { * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } /** * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } - // optional uint32 version_of_closing_node = 2; public static final int VERSION_OF_CLOSING_NODE_FIELD_NUMBER = 2; private int versionOfClosingNode_; /** @@ -7706,7 +8049,6 @@ public final class AdminProtos { return versionOfClosingNode_; } - // optional bool transition_in_ZK = 3 [default = true]; public static final int TRANSITION_IN_ZK_FIELD_NUMBER = 3; private boolean transitionInZK_; /** @@ -7722,7 +8064,6 @@ public final class AdminProtos { return transitionInZK_; } - // optional .hbase.pb.ServerName destination_server = 4; public static final int DESTINATION_SERVER_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName destinationServer_; /** @@ -7735,50 +8076,43 @@ public final class AdminProtos { * optional .hbase.pb.ServerName destination_server = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getDestinationServer() { - return destinationServer_; + return destinationServer_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : destinationServer_; } /** * optional .hbase.pb.ServerName destination_server = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestinationServerOrBuilder() { - return destinationServer_; + return destinationServer_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : destinationServer_; } - // optional uint64 serverStartCode = 5; public static final int SERVERSTARTCODE_FIELD_NUMBER = 5; private long serverStartCode_; /** - * optional uint64 serverStartCode = 5; - * *
      * the intended server for this RPC.
      * 
+ * + * optional uint64 serverStartCode = 5; */ public boolean hasServerStartCode() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** - * optional uint64 serverStartCode = 5; - * *
      * the intended server for this RPC.
      * 
+ * + * optional uint64 serverStartCode = 5; */ public long getServerStartCode() { return serverStartCode_; } - private void initFields() { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - versionOfClosingNode_ = 0; - transitionInZK_ = true; - destinationServer_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - serverStartCode_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRegion()) { memoizedIsInitialized = 0; @@ -7800,9 +8134,8 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); + output.writeMessage(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt32(2, versionOfClosingNode_); @@ -7811,23 +8144,22 @@ public final class AdminProtos { output.writeBool(3, transitionInZK_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeMessage(4, destinationServer_); + output.writeMessage(4, getDestinationServer()); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeUInt64(5, serverStartCode_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); + .computeMessageSize(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += 
com.google.protobuf.CodedOutputStream @@ -7839,25 +8171,19 @@ public final class AdminProtos { } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(4, destinationServer_); + .computeMessageSize(4, getDestinationServer()); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(5, serverStartCode_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -7893,12 +8219,10 @@ public final class AdminProtos { result = result && (getServerStartCode() == other.getServerStartCode()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -7916,7 +8240,8 @@ public final class AdminProtos { } if (hasTransitionInZK()) { hash = (37 * hash) + TRANSITION_IN_ZK_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getTransitionInZK()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getTransitionInZK()); } if (hasDestinationServer()) { hash = (37 * hash) + DESTINATION_SERVER_FIELD_NUMBER; @@ -7924,9 +8249,10 @@ public final class AdminProtos { } if (hasServerStartCode()) { hash = (37 * hash) + SERVERSTARTCODE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getServerStartCode()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getServerStartCode()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash 
= (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -7954,67 +8280,79 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.CloseRegionRequest} - * *
      **
      * Closes the specified region and will use or not use ZK during the close
      * according to the specified flag.
      * 
+ * + * Protobuf type {@code hbase.pb.CloseRegionRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.CloseRegionRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_CloseRegionRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_CloseRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -8027,24 +8365,21 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionFieldBuilder(); getDestinationServerFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; } else { regionBuilder_.clear(); } @@ -8054,7 +8389,7 @@ public final class AdminProtos { transitionInZK_ = true; bitField0_ = (bitField0_ & ~0x00000004); if (destinationServerBuilder_ == 
null) { - destinationServer_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + destinationServer_ = null; } else { destinationServerBuilder_.clear(); } @@ -8064,10 +8399,6 @@ public final class AdminProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_CloseRegionRequest_descriptor; @@ -8122,6 +8453,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest)other); @@ -8148,22 +8505,20 @@ public final class AdminProtos { if (other.hasServerStartCode()) { setServerStartCode(other.getServerStartCode()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + 
onChanged(); return this; } public final boolean isInitialized() { if (!hasRegion()) { - return false; } if (!getRegion().isInitialized()) { - return false; } if (hasDestinationServer()) { if (!getDestinationServer().isInitialized()) { - return false; } } @@ -8179,7 +8534,7 @@ public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -8189,9 +8544,8 @@ public final class AdminProtos { } private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * required .hbase.pb.RegionSpecifier region = 1; @@ -8204,7 +8558,7 @@ public final class AdminProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } @@ -8245,6 +8599,7 @@ public final class AdminProtos { public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != null && region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); @@ -8263,7 +8618,7 @@ public final class AdminProtos { */ public Builder clearRegion() { if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; onChanged(); } else { regionBuilder_.clear(); @@ -8286,19 +8641,20 @@ public final class AdminProtos { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { - return region_; + return region_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } } /** * required .hbase.pb.RegionSpecifier region = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, + getRegion(), getParentForChildren(), isClean()); region_ = null; @@ -8306,7 +8662,6 @@ public final class AdminProtos { return regionBuilder_; } - // optional uint32 version_of_closing_node = 2; private int versionOfClosingNode_ ; /** * optional uint32 version_of_closing_node = 2; @@ -8339,7 +8694,6 @@ public final class AdminProtos { return this; } - // optional bool transition_in_ZK = 3 [default = true]; private boolean transitionInZK_ = true; /** * optional bool transition_in_ZK = 3 [default = true]; @@ -8372,9 +8726,8 @@ public final class AdminProtos { return this; } - // optional .hbase.pb.ServerName destination_server = 4; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName destinationServer_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName destinationServer_ = null; + private 
com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> destinationServerBuilder_; /** * optional .hbase.pb.ServerName destination_server = 4; @@ -8387,7 +8740,7 @@ public final class AdminProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getDestinationServer() { if (destinationServerBuilder_ == null) { - return destinationServer_; + return destinationServer_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : destinationServer_; } else { return destinationServerBuilder_.getMessage(); } @@ -8428,6 +8781,7 @@ public final class AdminProtos { public Builder mergeDestinationServer(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (destinationServerBuilder_ == null) { if (((bitField0_ & 0x00000008) == 0x00000008) && + destinationServer_ != null && destinationServer_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { destinationServer_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.newBuilder(destinationServer_).mergeFrom(value).buildPartial(); @@ -8446,7 +8800,7 @@ public final class AdminProtos { */ public Builder clearDestinationServer() { if (destinationServerBuilder_ == null) { - destinationServer_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + destinationServer_ = null; onChanged(); } else { destinationServerBuilder_.clear(); @@ -8469,19 +8823,20 @@ public final class AdminProtos { if (destinationServerBuilder_ != null) { return destinationServerBuilder_.getMessageOrBuilder(); } else { - return destinationServer_; + return destinationServer_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : destinationServer_; } } /** * optional .hbase.pb.ServerName destination_server = 4; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getDestinationServerFieldBuilder() { if (destinationServerBuilder_ == null) { - destinationServerBuilder_ = new com.google.protobuf.SingleFieldBuilder< + destinationServerBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( - destinationServer_, + getDestinationServer(), getParentForChildren(), isClean()); destinationServer_ = null; @@ -8489,34 +8844,33 @@ public final class AdminProtos { return destinationServerBuilder_; } - // optional uint64 serverStartCode = 5; private long serverStartCode_ ; /** - * optional uint64 serverStartCode = 5; - * *
        * the intended server for this RPC.
        * 
+ * + * optional uint64 serverStartCode = 5; */ public boolean hasServerStartCode() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** - * optional uint64 serverStartCode = 5; - * *
        * the intended server for this RPC.
        * 
+ * + * optional uint64 serverStartCode = 5; */ public long getServerStartCode() { return serverStartCode_; } /** - * optional uint64 serverStartCode = 5; - * *
        * the intended server for this RPC.
        * 
+ * + * optional uint64 serverStartCode = 5; */ public Builder setServerStartCode(long value) { bitField0_ |= 0x00000010; @@ -8525,11 +8879,11 @@ public final class AdminProtos { return this; } /** - * optional uint64 serverStartCode = 5; - * *
        * the intended server for this RPC.
        * 
+ * + * optional uint64 serverStartCode = 5; */ public Builder clearServerStartCode() { bitField0_ = (bitField0_ & ~0x00000010); @@ -8537,22 +8891,59 @@ public final class AdminProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.CloseRegionRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.CloseRegionRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest DEFAULT_INSTANCE; static { - defaultInstance = new CloseRegionRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public CloseRegionRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CloseRegionRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.CloseRegionRequest) } - public interface 
CloseRegionResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface CloseRegionResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.CloseRegionResponse) + com.google.protobuf.MessageOrBuilder { - // required bool closed = 1; /** * required bool closed = 1; */ @@ -8565,36 +8956,28 @@ public final class AdminProtos { /** * Protobuf type {@code hbase.pb.CloseRegionResponse} */ - public static final class CloseRegionResponse extends - com.google.protobuf.GeneratedMessage - implements CloseRegionResponseOrBuilder { + public static final class CloseRegionResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.CloseRegionResponse) + CloseRegionResponseOrBuilder { // Use CloseRegionResponse.newBuilder() to construct. - private CloseRegionResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private CloseRegionResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private CloseRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final CloseRegionResponse defaultInstance; - public static CloseRegionResponse getDefaultInstance() { - return defaultInstance; - } - - public CloseRegionResponse getDefaultInstanceForType() { - return defaultInstance; + private CloseRegionResponse() { + closed_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private CloseRegionResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; 
com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -8624,7 +9007,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -8635,30 +9018,14 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_CloseRegionResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_CloseRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CloseRegionResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CloseRegionResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bool closed = 1; public static final int CLOSED_FIELD_NUMBER = 1; private boolean closed_; /** @@ -8674,13 +9041,11 @@ public final class AdminProtos { return closed_; } - private void initFields() { - closed_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() 
{ byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasClosed()) { memoizedIsInitialized = 0; @@ -8692,16 +9057,14 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, closed_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -8709,19 +9072,13 @@ public final class AdminProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, closed_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -8737,12 +9094,10 @@ public final class AdminProtos { result = result && (getClosed() == other.getClosed()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -8752,9 +9107,10 @@ public final class AdminProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasClosed()) { hash = (37 * hash) + CLOSED_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getClosed()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( 
+ getClosed()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -8782,46 +9138,57 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -8829,14 +9196,15 @@ public final class AdminProtos { * Protobuf type {@code hbase.pb.CloseRegionResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.CloseRegionResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_CloseRegionResponse_descriptor; 
} - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_CloseRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -8849,18 +9217,15 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); closed_ = false; @@ -8868,10 +9233,6 @@ public final class AdminProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_CloseRegionResponse_descriptor; @@ -8902,6 +9263,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) 
super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse)other); @@ -8916,13 +9303,13 @@ public final class AdminProtos { if (other.hasClosed()) { setClosed(other.getClosed()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasClosed()) { - return false; } return true; @@ -8937,7 +9324,7 @@ public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -8947,7 +9334,6 @@ public final class AdminProtos { } private int bitField0_; - // required bool closed = 1; private boolean closed_ ; /** * required bool closed = 1; @@ -8979,22 +9365,59 @@ public final class AdminProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.CloseRegionResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.CloseRegionResponse) + private static final 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse DEFAULT_INSTANCE; static { - defaultInstance = new CloseRegionResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public CloseRegionResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CloseRegionResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.CloseRegionResponse) } - public interface FlushRegionRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface FlushRegionRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.FlushRegionRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionSpecifier region = 1; /** * required .hbase.pb.RegionSpecifier region = 1; */ @@ -9008,7 +9431,6 @@ public final class AdminProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - // optional uint64 if_older_than_ts = 2; /** * optional uint64 if_older_than_ts = 2; */ @@ -9018,64 +9440,56 @@ public final class AdminProtos { */ long getIfOlderThanTs(); - 
// optional bool write_flush_wal_marker = 3; /** - * optional bool write_flush_wal_marker = 3; - * *
      * whether to write a marker to WAL even if not flushed
      * 
+ * + * optional bool write_flush_wal_marker = 3; */ boolean hasWriteFlushWalMarker(); /** - * optional bool write_flush_wal_marker = 3; - * *
      * whether to write a marker to WAL even if not flushed
      * 
+ * + * optional bool write_flush_wal_marker = 3; */ boolean getWriteFlushWalMarker(); } /** - * Protobuf type {@code hbase.pb.FlushRegionRequest} - * *
    **
    * Flushes the MemStore of the specified region.
    * <p>
    * This method is synchronous.
    * 
+ * + * Protobuf type {@code hbase.pb.FlushRegionRequest} */ - public static final class FlushRegionRequest extends - com.google.protobuf.GeneratedMessage - implements FlushRegionRequestOrBuilder { + public static final class FlushRegionRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.FlushRegionRequest) + FlushRegionRequestOrBuilder { // Use FlushRegionRequest.newBuilder() to construct. - private FlushRegionRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private FlushRegionRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private FlushRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final FlushRegionRequest defaultInstance; - public static FlushRegionRequest getDefaultInstance() { - return defaultInstance; } - - public FlushRegionRequest getDefaultInstanceForType() { - return defaultInstance; + private FlushRegionRequest() { + ifOlderThanTs_ = 0L; + writeFlushWalMarker_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private FlushRegionRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -9123,7 +9537,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } 
finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -9134,30 +9548,14 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_FlushRegionRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_FlushRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public FlushRegionRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new FlushRegionRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** @@ -9170,16 +9568,15 @@ public final class AdminProtos { * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } /** * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } - // optional uint64 if_older_than_ts = 2; public static final int IF_OLDER_THAN_TS_FIELD_NUMBER = 2; private long ifOlderThanTs_; /** @@ -9195,39 +9592,34 @@ public final class AdminProtos { return ifOlderThanTs_; } - // optional bool write_flush_wal_marker = 3; public static final int WRITE_FLUSH_WAL_MARKER_FIELD_NUMBER = 3; private boolean writeFlushWalMarker_; /** - * optional bool write_flush_wal_marker = 3; - * *
      * whether to write a marker to WAL even if not flushed
      * 
+ * + * optional bool write_flush_wal_marker = 3; */ public boolean hasWriteFlushWalMarker() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * optional bool write_flush_wal_marker = 3; - * *
      * whether to write a marker to WAL even if not flushed
      * 
+ * + * optional bool write_flush_wal_marker = 3; */ public boolean getWriteFlushWalMarker() { return writeFlushWalMarker_; } - private void initFields() { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - ifOlderThanTs_ = 0L; - writeFlushWalMarker_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRegion()) { memoizedIsInitialized = 0; @@ -9243,9 +9635,8 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); + output.writeMessage(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt64(2, ifOlderThanTs_); @@ -9253,18 +9644,17 @@ public final class AdminProtos { if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBool(3, writeFlushWalMarker_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); + .computeMessageSize(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream @@ -9274,19 +9664,13 @@ public final class AdminProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(3, writeFlushWalMarker_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } 
private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -9312,12 +9696,10 @@ public final class AdminProtos { result = result && (getWriteFlushWalMarker() == other.getWriteFlushWalMarker()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -9331,13 +9713,15 @@ public final class AdminProtos { } if (hasIfOlderThanTs()) { hash = (37 * hash) + IF_OLDER_THAN_TS_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getIfOlderThanTs()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getIfOlderThanTs()); } if (hasWriteFlushWalMarker()) { hash = (37 * hash) + WRITE_FLUSH_WAL_MARKER_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getWriteFlushWalMarker()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getWriteFlushWalMarker()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -9365,68 +9749,80 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder 
toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.FlushRegionRequest} - * *
      **
      * Flushes the MemStore of the specified region.
      * <p>
      * This method is synchronous.
      * 
+ * + * Protobuf type {@code hbase.pb.FlushRegionRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.FlushRegionRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_FlushRegionRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_FlushRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -9439,23 +9835,20 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; } else { regionBuilder_.clear(); } @@ -9467,10 +9860,6 @@ public final class AdminProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_FlushRegionRequest_descriptor; @@ -9513,6 +9902,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest)other); @@ -9533,17 +9948,16 @@ public final class AdminProtos { if (other.hasWriteFlushWalMarker()) { setWriteFlushWalMarker(other.getWriteFlushWalMarker()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasRegion()) { - return false; } if (!getRegion().isInitialized()) { - return false; } return true; @@ -9558,7 +9972,7 @@ public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -9568,9 +9982,8 @@ public final class AdminProtos { } private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * required .hbase.pb.RegionSpecifier region = 1; @@ -9583,7 +9996,7 @@ public final class AdminProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } @@ -9624,6 +10037,7 @@ public final class AdminProtos { public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != null && region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); @@ -9642,7 +10056,7 @@ public final class AdminProtos { */ public Builder clearRegion() { if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; onChanged(); } else { regionBuilder_.clear(); @@ -9665,19 +10079,20 @@ public final class AdminProtos { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { - return region_; + return region_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } } /** * required .hbase.pb.RegionSpecifier region = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, + getRegion(), getParentForChildren(), isClean()); region_ = null; @@ -9685,7 +10100,6 @@ public final class AdminProtos { return regionBuilder_; } - // optional uint64 if_older_than_ts = 2; private long ifOlderThanTs_ ; /** * optional uint64 if_older_than_ts = 2; @@ -9718,34 +10132,33 @@ public final class AdminProtos { return this; } - // optional bool write_flush_wal_marker = 3; private boolean writeFlushWalMarker_ ; /** - * optional bool write_flush_wal_marker = 3; - * *
        * whether to write a marker to WAL even if not flushed
        * 
+ * + * optional bool write_flush_wal_marker = 3; */ public boolean hasWriteFlushWalMarker() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * optional bool write_flush_wal_marker = 3; - * *
        * whether to write a marker to WAL even if not flushed
        * 
+ * + * optional bool write_flush_wal_marker = 3; */ public boolean getWriteFlushWalMarker() { return writeFlushWalMarker_; } /** - * optional bool write_flush_wal_marker = 3; - * *
        * whether to write a marker to WAL even if not flushed
        * 
+ * + * optional bool write_flush_wal_marker = 3; */ public Builder setWriteFlushWalMarker(boolean value) { bitField0_ |= 0x00000004; @@ -9754,11 +10167,11 @@ public final class AdminProtos { return this; } /** - * optional bool write_flush_wal_marker = 3; - * *
        * whether to write a marker to WAL even if not flushed
        * 
+ * + * optional bool write_flush_wal_marker = 3; */ public Builder clearWriteFlushWalMarker() { bitField0_ = (bitField0_ & ~0x00000004); @@ -9766,22 +10179,59 @@ public final class AdminProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.FlushRegionRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.FlushRegionRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest DEFAULT_INSTANCE; static { - defaultInstance = new FlushRegionRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public FlushRegionRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FlushRegionRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.FlushRegionRequest) } - public interface 
FlushRegionResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface FlushRegionResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.FlushRegionResponse) + com.google.protobuf.MessageOrBuilder { - // required uint64 last_flush_time = 1; /** * required uint64 last_flush_time = 1; */ @@ -9791,7 +10241,6 @@ public final class AdminProtos { */ long getLastFlushTime(); - // optional bool flushed = 2; /** * optional bool flushed = 2; */ @@ -9801,7 +10250,6 @@ public final class AdminProtos { */ boolean getFlushed(); - // optional bool wrote_flush_wal_marker = 3; /** * optional bool wrote_flush_wal_marker = 3; */ @@ -9814,36 +10262,30 @@ public final class AdminProtos { /** * Protobuf type {@code hbase.pb.FlushRegionResponse} */ - public static final class FlushRegionResponse extends - com.google.protobuf.GeneratedMessage - implements FlushRegionResponseOrBuilder { + public static final class FlushRegionResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.FlushRegionResponse) + FlushRegionResponseOrBuilder { // Use FlushRegionResponse.newBuilder() to construct. 
- private FlushRegionResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private FlushRegionResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private FlushRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final FlushRegionResponse defaultInstance; - public static FlushRegionResponse getDefaultInstance() { - return defaultInstance; } - - public FlushRegionResponse getDefaultInstanceForType() { - return defaultInstance; + private FlushRegionResponse() { + lastFlushTime_ = 0L; + flushed_ = false; + wroteFlushWalMarker_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private FlushRegionResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -9883,7 +10325,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -9894,30 +10336,14 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_FlushRegionResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { 
return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_FlushRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public FlushRegionResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new FlushRegionResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required uint64 last_flush_time = 1; public static final int LAST_FLUSH_TIME_FIELD_NUMBER = 1; private long lastFlushTime_; /** @@ -9933,7 +10359,6 @@ public final class AdminProtos { return lastFlushTime_; } - // optional bool flushed = 2; public static final int FLUSHED_FIELD_NUMBER = 2; private boolean flushed_; /** @@ -9949,7 +10374,6 @@ public final class AdminProtos { return flushed_; } - // optional bool wrote_flush_wal_marker = 3; public static final int WROTE_FLUSH_WAL_MARKER_FIELD_NUMBER = 3; private boolean wroteFlushWalMarker_; /** @@ -9965,15 +10389,11 @@ public final class AdminProtos { return wroteFlushWalMarker_; } - private void initFields() { - lastFlushTime_ = 0L; - flushed_ = false; - wroteFlushWalMarker_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasLastFlushTime()) { memoizedIsInitialized = 0; @@ -9985,7 +10405,6 @@ public final class AdminProtos { public void 
writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, lastFlushTime_); } @@ -9995,12 +10414,11 @@ public final class AdminProtos { if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBool(3, wroteFlushWalMarker_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -10016,19 +10434,13 @@ public final class AdminProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(3, wroteFlushWalMarker_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -10054,12 +10466,10 @@ public final class AdminProtos { result = result && (getWroteFlushWalMarker() == other.getWroteFlushWalMarker()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -10069,17 +10479,20 @@ public final class AdminProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasLastFlushTime()) { hash = (37 * hash) + LAST_FLUSH_TIME_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLastFlushTime()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getLastFlushTime()); } if (hasFlushed()) { hash = (37 * hash) + 
FLUSHED_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getFlushed()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getFlushed()); } if (hasWroteFlushWalMarker()) { hash = (37 * hash) + WROTE_FLUSH_WAL_MARKER_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getWroteFlushWalMarker()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getWroteFlushWalMarker()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -10107,46 +10520,57 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, 
extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -10154,14 +10578,15 @@ public final class AdminProtos { * Protobuf type {@code hbase.pb.FlushRegionResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.FlushRegionResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_FlushRegionResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_FlushRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -10174,18 +10599,15 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { 
super.clear(); lastFlushTime_ = 0L; @@ -10197,10 +10619,6 @@ public final class AdminProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_FlushRegionResponse_descriptor; @@ -10239,6 +10657,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse)other); @@ -10259,13 +10703,13 @@ public final class AdminProtos { if (other.hasWroteFlushWalMarker()) { setWroteFlushWalMarker(other.getWroteFlushWalMarker()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasLastFlushTime()) { - return false; } return true; @@ -10280,7 +10724,7 @@ 
public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -10290,7 +10734,6 @@ public final class AdminProtos { } private int bitField0_; - // required uint64 last_flush_time = 1; private long lastFlushTime_ ; /** * required uint64 last_flush_time = 1; @@ -10323,7 +10766,6 @@ public final class AdminProtos { return this; } - // optional bool flushed = 2; private boolean flushed_ ; /** * optional bool flushed = 2; @@ -10356,7 +10798,6 @@ public final class AdminProtos { return this; } - // optional bool wrote_flush_wal_marker = 3; private boolean wroteFlushWalMarker_ ; /** * optional bool wrote_flush_wal_marker = 3; @@ -10388,22 +10829,59 @@ public final class AdminProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.FlushRegionResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.FlushRegionResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse DEFAULT_INSTANCE; static { - defaultInstance = new FlushRegionResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } 
+ + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public FlushRegionResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FlushRegionResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.FlushRegionResponse) } - public interface SplitRegionRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface SplitRegionRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SplitRegionRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionSpecifier region = 1; /** * required .hbase.pb.RegionSpecifier region = 1; */ @@ -10417,7 +10895,6 @@ public final class AdminProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - // optional bytes split_point = 2; /** * optional bytes split_point = 2; */ @@ -10428,8 +10905,6 @@ public final class AdminProtos { com.google.protobuf.ByteString getSplitPoint(); } /** - * Protobuf type {@code hbase.pb.SplitRegionRequest} - * *
    **
    * Splits the specified region.
@@ -10438,37 +10913,31 @@ public final class AdminProtos {
    * will then trigger a split.  The flush is done synchronously but the
    * compaction is asynchronous.
    * 
+ * + * Protobuf type {@code hbase.pb.SplitRegionRequest} */ - public static final class SplitRegionRequest extends - com.google.protobuf.GeneratedMessage - implements SplitRegionRequestOrBuilder { + public static final class SplitRegionRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.SplitRegionRequest) + SplitRegionRequestOrBuilder { // Use SplitRegionRequest.newBuilder() to construct. - private SplitRegionRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private SplitRegionRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private SplitRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SplitRegionRequest defaultInstance; - public static SplitRegionRequest getDefaultInstance() { - return defaultInstance; } - - public SplitRegionRequest getDefaultInstanceForType() { - return defaultInstance; + private SplitRegionRequest() { + splitPoint_ = com.google.protobuf.ByteString.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private SplitRegionRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -10511,7 +10980,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } 
finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -10522,30 +10991,14 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_SplitRegionRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_SplitRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SplitRegionRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SplitRegionRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** @@ -10558,16 +11011,15 @@ public final class AdminProtos { * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } /** * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } - // optional bytes split_point = 2; public static final int SPLIT_POINT_FIELD_NUMBER = 2; private com.google.protobuf.ByteString splitPoint_; /** @@ -10583,14 +11035,11 @@ public final class AdminProtos { return splitPoint_; } - private void initFields() { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - splitPoint_ = com.google.protobuf.ByteString.EMPTY; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRegion()) { memoizedIsInitialized = 0; @@ -10606,43 +11055,35 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); + output.writeMessage(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, splitPoint_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); + .computeMessageSize(1, getRegion()); } if 
(((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, splitPoint_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -10663,12 +11104,10 @@ public final class AdminProtos { result = result && getSplitPoint() .equals(other.getSplitPoint()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -10684,7 +11123,7 @@ public final class AdminProtos { hash = (37 * hash) + SPLIT_POINT_FIELD_NUMBER; hash = (53 * hash) + getSplitPoint().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -10712,52 +11151,61 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, 
extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.SplitRegionRequest} - * *
      **
      * Splits the specified region.
@@ -10766,16 +11214,19 @@ public final class AdminProtos {
      * will then trigger a split.  The flush is done synchronously but the
      * compaction is asynchronous.
      * 
+ * + * Protobuf type {@code hbase.pb.SplitRegionRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SplitRegionRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_SplitRegionRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_SplitRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -10788,23 +11239,20 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; } else { regionBuilder_.clear(); } @@ -10814,10 +11262,6 @@ public final class AdminProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_SplitRegionRequest_descriptor; @@ -10856,6 +11300,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest)other); @@ -10873,17 +11343,16 @@ public final class AdminProtos { if (other.hasSplitPoint()) { setSplitPoint(other.getSplitPoint()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasRegion()) { - return false; } if (!getRegion().isInitialized()) { - return false; } return true; @@ -10898,7 +11367,7 @@ public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage 
= (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -10908,9 +11377,8 @@ public final class AdminProtos { } private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * required .hbase.pb.RegionSpecifier region = 1; @@ -10923,7 +11391,7 @@ public final class AdminProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } @@ -10964,6 +11432,7 @@ public final class AdminProtos { public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != null && region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); @@ -10982,7 +11451,7 @@ public final class AdminProtos { */ public Builder clearRegion() { if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; onChanged(); } else { regionBuilder_.clear(); @@ -11005,19 +11474,20 @@ public final class AdminProtos { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { - return region_; + return region_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } } /** * required .hbase.pb.RegionSpecifier region = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, + getRegion(), getParentForChildren(), isClean()); region_ = null; @@ -11025,7 +11495,6 @@ public final class AdminProtos { return regionBuilder_; } - // optional bytes split_point = 2; private com.google.protobuf.ByteString splitPoint_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes split_point = 2; @@ -11060,54 +11529,83 @@ public final class AdminProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.SplitRegionRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.SplitRegionRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest DEFAULT_INSTANCE; static { - defaultInstance = new 
SplitRegionRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public SplitRegionRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SplitRegionRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.SplitRegionRequest) } - public interface SplitRegionResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface SplitRegionResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SplitRegionResponse) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.SplitRegionResponse} */ - public static final class SplitRegionResponse extends - com.google.protobuf.GeneratedMessage - implements SplitRegionResponseOrBuilder { + public static final class SplitRegionResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.SplitRegionResponse) + SplitRegionResponseOrBuilder { // Use SplitRegionResponse.newBuilder() to construct. 
- private SplitRegionResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private SplitRegionResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private SplitRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SplitRegionResponse defaultInstance; - public static SplitRegionResponse getDefaultInstance() { - return defaultInstance; } - - public SplitRegionResponse getDefaultInstanceForType() { - return defaultInstance; + private SplitRegionResponse() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private SplitRegionResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -11131,7 +11629,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -11142,34 +11640,18 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_SplitRegionResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_SplitRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SplitRegionResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SplitRegionResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -11177,29 +11659,21 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean 
equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -11210,12 +11684,10 @@ public final class AdminProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -11223,7 +11695,7 @@ public final class AdminProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -11251,46 +11723,57 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -11298,14 +11781,15 @@ public final class AdminProtos { * Protobuf type {@code hbase.pb.SplitRegionResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SplitRegionResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_SplitRegionResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_SplitRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -11318,27 +11802,20 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { 
super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_SplitRegionResponse_descriptor; @@ -11362,6 +11839,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse)other); @@ -11373,7 +11876,8 @@ public final class AdminProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -11390,7 +11894,7 @@ public final class 
AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -11398,24 +11902,61 @@ public final class AdminProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.SplitRegionResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.SplitRegionResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse DEFAULT_INSTANCE; static { - defaultInstance = new SplitRegionResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse(); } - // @@protoc_insertion_point(class_scope:hbase.pb.SplitRegionResponse) - } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } - public interface CompactRegionRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public SplitRegionResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SplitRegionResponse(input, extensionRegistry); + } + }; - // required 
.hbase.pb.RegionSpecifier region = 1; - /** - * required .hbase.pb.RegionSpecifier region = 1; + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface CompactRegionRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.CompactRegionRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * required .hbase.pb.RegionSpecifier region = 1; */ boolean hasRegion(); /** @@ -11427,7 +11968,6 @@ public final class AdminProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - // optional bool major = 2; /** * optional bool major = 2; */ @@ -11437,7 +11977,6 @@ public final class AdminProtos { */ boolean getMajor(); - // optional bytes family = 3; /** * optional bytes family = 3; */ @@ -11448,45 +11987,38 @@ public final class AdminProtos { com.google.protobuf.ByteString getFamily(); } /** - * Protobuf type {@code hbase.pb.CompactRegionRequest} - * *
    **
    * Compacts the specified region.  Performs a major compaction if specified.
    * <p>
    * This method is asynchronous.
    * 
+ * + * Protobuf type {@code hbase.pb.CompactRegionRequest} */ - public static final class CompactRegionRequest extends - com.google.protobuf.GeneratedMessage - implements CompactRegionRequestOrBuilder { + public static final class CompactRegionRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.CompactRegionRequest) + CompactRegionRequestOrBuilder { // Use CompactRegionRequest.newBuilder() to construct. - private CompactRegionRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private CompactRegionRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private CompactRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final CompactRegionRequest defaultInstance; - public static CompactRegionRequest getDefaultInstance() { - return defaultInstance; } - - public CompactRegionRequest getDefaultInstanceForType() { - return defaultInstance; + private CompactRegionRequest() { + major_ = false; + family_ = com.google.protobuf.ByteString.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private CompactRegionRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -11534,7 +12066,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - 
e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -11545,30 +12077,14 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_CompactRegionRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_CompactRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CompactRegionRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CompactRegionRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** @@ -11581,16 +12097,15 @@ public final class AdminProtos { * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } /** * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } - // optional bool major = 2; public static final int MAJOR_FIELD_NUMBER = 2; private boolean major_; /** @@ -11606,7 +12121,6 @@ public final class AdminProtos { return major_; } - // optional bytes family = 3; public static final int FAMILY_FIELD_NUMBER = 3; private com.google.protobuf.ByteString family_; /** @@ -11622,15 +12136,11 @@ public final class AdminProtos { return family_; } - private void initFields() { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - major_ = false; - family_ = com.google.protobuf.ByteString.EMPTY; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRegion()) { memoizedIsInitialized = 0; @@ -11646,9 +12156,8 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); + output.writeMessage(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBool(2, major_); @@ -11656,18 +12165,17 @@ public final class AdminProtos { if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBytes(3, family_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int 
getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); + .computeMessageSize(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream @@ -11677,19 +12185,13 @@ public final class AdminProtos { size += com.google.protobuf.CodedOutputStream .computeBytesSize(3, family_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -11715,12 +12217,10 @@ public final class AdminProtos { result = result && getFamily() .equals(other.getFamily()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -11734,13 +12234,14 @@ public final class AdminProtos { } if (hasMajor()) { hash = (37 * hash) + MAJOR_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getMajor()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getMajor()); } if (hasFamily()) { hash = (37 * hash) + FAMILY_FIELD_NUMBER; hash = (53 * hash) + getFamily().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -11768,68 +12269,80 @@ public final class AdminProtos { } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.CompactRegionRequest} - * *
      **
      * Compacts the specified region.  Performs a major compaction if specified.
      * <p>
      * This method is asynchronous.
      * 
+ * + * Protobuf type {@code hbase.pb.CompactRegionRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.CompactRegionRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_CompactRegionRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_CompactRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -11842,23 +12355,20 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; } else { regionBuilder_.clear(); } @@ -11870,10 +12380,6 @@ public final class AdminProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_CompactRegionRequest_descriptor; @@ -11916,6 +12422,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest)other); @@ -11936,17 +12468,16 @@ public final class AdminProtos { if (other.hasFamily()) { setFamily(other.getFamily()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasRegion()) { - return false; } if (!getRegion().isInitialized()) { - return false; } return true; @@ -11961,7 +12492,7 @@ public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = 
(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -11971,9 +12502,8 @@ public final class AdminProtos { } private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * required .hbase.pb.RegionSpecifier region = 1; @@ -11986,7 +12516,7 @@ public final class AdminProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } @@ -12027,6 +12557,7 @@ public final class AdminProtos { public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != null && region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); @@ -12045,7 +12576,7 @@ public final class AdminProtos { */ public Builder clearRegion() { if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; onChanged(); } else { regionBuilder_.clear(); @@ -12068,19 +12599,20 @@ public final class AdminProtos { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { - return region_; + return region_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } } /** * required .hbase.pb.RegionSpecifier region = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, + getRegion(), getParentForChildren(), isClean()); region_ = null; @@ -12088,7 +12620,6 @@ public final class AdminProtos { return regionBuilder_; } - // optional bool major = 2; private boolean major_ ; /** * optional bool major = 2; @@ -12121,7 +12652,6 @@ public final class AdminProtos { return this; } - // optional bytes family = 3; private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes family = 3; @@ -12156,54 +12686,83 @@ public final class AdminProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.CompactRegionRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.CompactRegionRequest) + private 
static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest DEFAULT_INSTANCE; static { - defaultInstance = new CompactRegionRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public CompactRegionRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CompactRegionRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.CompactRegionRequest) } - public interface CompactRegionResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface CompactRegionResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.CompactRegionResponse) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.CompactRegionResponse} */ - public static final class CompactRegionResponse extends - com.google.protobuf.GeneratedMessage - implements CompactRegionResponseOrBuilder { + public static final class CompactRegionResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.CompactRegionResponse) + 
CompactRegionResponseOrBuilder { // Use CompactRegionResponse.newBuilder() to construct. - private CompactRegionResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private CompactRegionResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private CompactRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final CompactRegionResponse defaultInstance; - public static CompactRegionResponse getDefaultInstance() { - return defaultInstance; - } - - public CompactRegionResponse getDefaultInstanceForType() { - return defaultInstance; + private CompactRegionResponse() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private CompactRegionResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -12227,7 +12786,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -12238,34 +12797,18 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_CompactRegionResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_CompactRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CompactRegionResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CompactRegionResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -12273,29 +12816,21 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); 
- } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -12306,12 +12841,10 @@ public final class AdminProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -12319,7 +12852,7 @@ public final class AdminProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -12347,46 +12880,57 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -12394,14 +12938,15 @@ public final class AdminProtos { * Protobuf type {@code hbase.pb.CompactRegionResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.CompactRegionResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_CompactRegionResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_CompactRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -12414,27 +12959,20 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { 
super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_CompactRegionResponse_descriptor; @@ -12458,6 +12996,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse)other); @@ -12469,7 +13033,8 @@ public final class AdminProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -12486,7 +13051,7 @@ public final class 
AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -12494,22 +13059,59 @@ public final class AdminProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.CompactRegionResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.CompactRegionResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse DEFAULT_INSTANCE; static { - defaultInstance = new CompactRegionResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public CompactRegionResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CompactRegionResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + 
return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.CompactRegionResponse) } - public interface UpdateFavoredNodesRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface UpdateFavoredNodesRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.UpdateFavoredNodesRequest) + com.google.protobuf.MessageOrBuilder { - // repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1; /** * repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1; */ @@ -12537,36 +13139,28 @@ public final class AdminProtos { /** * Protobuf type {@code hbase.pb.UpdateFavoredNodesRequest} */ - public static final class UpdateFavoredNodesRequest extends - com.google.protobuf.GeneratedMessage - implements UpdateFavoredNodesRequestOrBuilder { + public static final class UpdateFavoredNodesRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.UpdateFavoredNodesRequest) + UpdateFavoredNodesRequestOrBuilder { // Use UpdateFavoredNodesRequest.newBuilder() to construct. 
- private UpdateFavoredNodesRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private UpdateFavoredNodesRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private UpdateFavoredNodesRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final UpdateFavoredNodesRequest defaultInstance; - public static UpdateFavoredNodesRequest getDefaultInstance() { - return defaultInstance; } - - public UpdateFavoredNodesRequest getDefaultInstanceForType() { - return defaultInstance; + private UpdateFavoredNodesRequest() { + updateInfo_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private UpdateFavoredNodesRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -12590,7 +13184,8 @@ public final class AdminProtos { updateInfo_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } - updateInfo_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo.PARSER, extensionRegistry)); + updateInfo_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo.PARSER, extensionRegistry)); break; } } @@ -12599,7 +13194,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - 
e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { updateInfo_ = java.util.Collections.unmodifiableList(updateInfo_); @@ -12613,32 +13208,17 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public UpdateFavoredNodesRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new UpdateFavoredNodesRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public interface RegionUpdateInfoOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RegionUpdateInfoOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionInfo region = 1; /** * required .hbase.pb.RegionInfo region = 1; */ @@ -12652,7 +13232,6 @@ public final class AdminProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder 
getRegionOrBuilder(); - // repeated .hbase.pb.ServerName favored_nodes = 2; /** * repeated .hbase.pb.ServerName favored_nodes = 2; */ @@ -12680,36 +13259,28 @@ public final class AdminProtos { /** * Protobuf type {@code hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo} */ - public static final class RegionUpdateInfo extends - com.google.protobuf.GeneratedMessage - implements RegionUpdateInfoOrBuilder { + public static final class RegionUpdateInfo extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo) + RegionUpdateInfoOrBuilder { // Use RegionUpdateInfo.newBuilder() to construct. - private RegionUpdateInfo(com.google.protobuf.GeneratedMessage.Builder builder) { + private RegionUpdateInfo(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private RegionUpdateInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RegionUpdateInfo defaultInstance; - public static RegionUpdateInfo getDefaultInstance() { - return defaultInstance; } - - public RegionUpdateInfo getDefaultInstanceForType() { - return defaultInstance; + private RegionUpdateInfo() { + favoredNodes_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RegionUpdateInfo( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -12746,7 +13317,8 @@ public final class AdminProtos { 
favoredNodes_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } - favoredNodes_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry)); + favoredNodes_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry)); break; } } @@ -12755,7 +13327,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { favoredNodes_ = java.util.Collections.unmodifiableList(favoredNodes_); @@ -12769,30 +13341,14 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesRequest_RegionUpdateInfo_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesRequest_RegionUpdateInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RegionUpdateInfo parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RegionUpdateInfo(input, extensionRegistry); - } - }; - - @java.lang.Override - public 
com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.RegionInfo region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo region_; /** @@ -12805,16 +13361,15 @@ public final class AdminProtos { * required .hbase.pb.RegionInfo region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegion() { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : region_; } /** * required .hbase.pb.RegionInfo region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionOrBuilder() { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : region_; } - // repeated .hbase.pb.ServerName favored_nodes = 2; public static final int FAVORED_NODES_FIELD_NUMBER = 2; private java.util.List favoredNodes_; /** @@ -12850,14 +13405,11 @@ public final class AdminProtos { return favoredNodes_.get(index); } - private void initFields() { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - favoredNodes_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRegion()) { memoizedIsInitialized = 0; @@ -12879,43 +13431,35 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); + output.writeMessage(1, getRegion()); } 
for (int i = 0; i < favoredNodes_.size(); i++) { output.writeMessage(2, favoredNodes_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); + .computeMessageSize(1, getRegion()); } for (int i = 0; i < favoredNodes_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, favoredNodes_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -12933,12 +13477,10 @@ public final class AdminProtos { } result = result && getFavoredNodesList() .equals(other.getFavoredNodesList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -12954,7 +13496,7 @@ public final class AdminProtos { hash = (37 * hash) + FAVORED_NODES_FIELD_NUMBER; hash = (53 * hash) + getFavoredNodesList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -12982,46 +13524,57 @@ public final class AdminProtos { } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -13029,14 +13582,15 @@ public final class AdminProtos { * Protobuf type {@code hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfoOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesRequest_RegionUpdateInfo_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesRequest_RegionUpdateInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -13049,24 +13603,21 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionFieldBuilder(); getFavoredNodesFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + region_ = null; } else { regionBuilder_.clear(); } @@ -13080,10 +13631,6 @@ public final class AdminProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesRequest_RegionUpdateInfo_descriptor; @@ -13127,6 +13674,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder 
clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo)other); @@ -13160,29 +13733,27 @@ public final class AdminProtos { favoredNodes_ = other.favoredNodes_; bitField0_ = (bitField0_ & ~0x00000002); favoredNodesBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getFavoredNodesFieldBuilder() : null; } else { favoredNodesBuilder_.addAllMessages(other.favoredNodes_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasRegion()) { - return false; } if (!getRegion().isInitialized()) { - return false; } for (int i = 0; i < getFavoredNodesCount(); i++) { if (!getFavoredNodes(i).isInitialized()) { - return false; } } @@ -13198,7 +13769,7 @@ public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -13208,9 +13779,8 @@ public final class AdminProtos { } private int bitField0_; - // required .hbase.pb.RegionInfo region = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo region_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionBuilder_; /** * required .hbase.pb.RegionInfo region = 1; @@ -13223,7 +13793,7 @@ public final class AdminProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegion() { if (regionBuilder_ == null) { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } @@ -13264,6 +13834,7 @@ public final class AdminProtos { public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != null && region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(region_).mergeFrom(value).buildPartial(); @@ -13282,7 +13853,7 @@ public final class AdminProtos { */ public Builder clearRegion() { if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + region_ = null; onChanged(); } else { regionBuilder_.clear(); @@ -13305,19 +13876,20 @@ public final class AdminProtos { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { - return region_; + return region_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : region_; } } /** * required .hbase.pb.RegionInfo region = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( - region_, + getRegion(), getParentForChildren(), isClean()); region_ = null; @@ -13325,7 +13897,6 @@ public final class AdminProtos { return regionBuilder_; } - // repeated .hbase.pb.ServerName favored_nodes = 2; private java.util.List favoredNodes_ = java.util.Collections.emptyList(); private void ensureFavoredNodesIsMutable() { @@ -13335,7 +13906,7 @@ public final class AdminProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> favoredNodesBuilder_; /** @@ -13467,7 +14038,8 @@ public final class AdminProtos { java.lang.Iterable values) { if (favoredNodesBuilder_ == null) { ensureFavoredNodesIsMutable(); - super.addAll(values, favoredNodes_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, favoredNodes_); onChanged(); } else { 
favoredNodesBuilder_.addAllMessages(values); @@ -13550,11 +14122,11 @@ public final class AdminProtos { getFavoredNodesBuilderList() { return getFavoredNodesFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getFavoredNodesFieldBuilder() { if (favoredNodesBuilder_ == null) { - favoredNodesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + favoredNodesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( favoredNodes_, ((bitField0_ & 0x00000002) == 0x00000002), @@ -13564,19 +14136,55 @@ public final class AdminProtos { } return favoredNodesBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo) } + // @@protoc_insertion_point(class_scope:hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo DEFAULT_INSTANCE; static { - defaultInstance = new RegionUpdateInfo(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RegionUpdateInfo parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionUpdateInfo(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo) } - // repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1; public static final int UPDATE_INFO_FIELD_NUMBER = 1; private java.util.List updateInfo_; /** @@ -13612,13 +14220,11 @@ public final class AdminProtos { return updateInfo_.get(index); } - private void initFields() { - updateInfo_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; for (int i = 0; i < getUpdateInfoCount(); i++) { if (!getUpdateInfo(i).isInitialized()) { @@ -13632,16 +14238,14 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws 
java.io.IOException { - getSerializedSize(); for (int i = 0; i < updateInfo_.size(); i++) { output.writeMessage(1, updateInfo_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -13649,19 +14253,13 @@ public final class AdminProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, updateInfo_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -13674,12 +14272,10 @@ public final class AdminProtos { boolean result = true; result = result && getUpdateInfoList() .equals(other.getUpdateInfoList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -13691,7 +14287,7 @@ public final class AdminProtos { hash = (37 * hash) + UPDATE_INFO_FIELD_NUMBER; hash = (53 * hash) + getUpdateInfoList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -13719,46 +14315,57 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + 
return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return 
Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -13766,14 +14373,15 @@ public final class AdminProtos { * Protobuf type {@code hbase.pb.UpdateFavoredNodesRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.UpdateFavoredNodesRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -13786,19 +14394,16 @@ public final class 
AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getUpdateInfoFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (updateInfoBuilder_ == null) { @@ -13810,10 +14415,6 @@ public final class AdminProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesRequest_descriptor; @@ -13847,6 +14448,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest)other); @@ -13877,21 +14504,21 @@ public final class AdminProtos { updateInfo_ = other.updateInfo_; bitField0_ = (bitField0_ & ~0x00000001); updateInfoBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getUpdateInfoFieldBuilder() : null; } else { updateInfoBuilder_.addAllMessages(other.updateInfo_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getUpdateInfoCount(); i++) { if (!getUpdateInfo(i).isInitialized()) { - return false; } } @@ -13907,7 +14534,7 @@ public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -13917,7 +14544,6 @@ public final class AdminProtos { } private int bitField0_; - // repeated .hbase.pb.UpdateFavoredNodesRequest.RegionUpdateInfo update_info = 1; private java.util.List updateInfo_ = java.util.Collections.emptyList(); private void ensureUpdateInfoIsMutable() { @@ -13927,7 +14553,7 @@ public final class AdminProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfoOrBuilder> updateInfoBuilder_; /** @@ -14059,7 +14685,8 @@ public final class AdminProtos { java.lang.Iterable values) { if (updateInfoBuilder_ == null) { ensureUpdateInfoIsMutable(); - super.addAll(values, updateInfo_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, updateInfo_); onChanged(); } else { updateInfoBuilder_.addAllMessages(values); @@ -14142,11 +14769,11 @@ public final class AdminProtos { getUpdateInfoBuilderList() { return getUpdateInfoFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfoOrBuilder> getUpdateInfoFieldBuilder() { if (updateInfoBuilder_ == null) { - updateInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + updateInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfoOrBuilder>( updateInfo_, ((bitField0_ & 0x00000001) == 0x00000001), @@ -14156,23 +14783,60 @@ public final class AdminProtos { } return updateInfoBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) 
{ + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.UpdateFavoredNodesRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.UpdateFavoredNodesRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest DEFAULT_INSTANCE; static { - defaultInstance = new UpdateFavoredNodesRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest(); } - // @@protoc_insertion_point(class_scope:hbase.pb.UpdateFavoredNodesRequest) - } - - public interface UpdateFavoredNodesResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } - // optional uint32 response = 1; - /** + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public UpdateFavoredNodesRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new UpdateFavoredNodesRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface UpdateFavoredNodesResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.UpdateFavoredNodesResponse) + com.google.protobuf.MessageOrBuilder { + + /** * optional uint32 response = 1; */ boolean hasResponse(); @@ -14184,36 +14848,28 @@ 
public final class AdminProtos { /** * Protobuf type {@code hbase.pb.UpdateFavoredNodesResponse} */ - public static final class UpdateFavoredNodesResponse extends - com.google.protobuf.GeneratedMessage - implements UpdateFavoredNodesResponseOrBuilder { + public static final class UpdateFavoredNodesResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.UpdateFavoredNodesResponse) + UpdateFavoredNodesResponseOrBuilder { // Use UpdateFavoredNodesResponse.newBuilder() to construct. - private UpdateFavoredNodesResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private UpdateFavoredNodesResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private UpdateFavoredNodesResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final UpdateFavoredNodesResponse defaultInstance; - public static UpdateFavoredNodesResponse getDefaultInstance() { - return defaultInstance; } - - public UpdateFavoredNodesResponse getDefaultInstanceForType() { - return defaultInstance; + private UpdateFavoredNodesResponse() { + response_ = 0; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private UpdateFavoredNodesResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -14243,7 +14899,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -14254,30 +14910,14 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public UpdateFavoredNodesResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new UpdateFavoredNodesResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional uint32 response = 1; public static final int RESPONSE_FIELD_NUMBER = 1; private int response_; /** @@ -14293,13 +14933,11 @@ public final class AdminProtos { return response_; } - private void initFields() { - response_ = 0; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; 
memoizedIsInitialized = 1; return true; @@ -14307,16 +14945,14 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt32(1, response_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -14324,19 +14960,13 @@ public final class AdminProtos { size += com.google.protobuf.CodedOutputStream .computeUInt32Size(1, response_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -14352,12 +14982,10 @@ public final class AdminProtos { result = result && (getResponse() == other.getResponse()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -14369,7 +14997,7 @@ public final class AdminProtos { hash = (37 * hash) + RESPONSE_FIELD_NUMBER; hash = (53 * hash) + getResponse(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -14397,46 +15025,57 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse 
parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -14444,14 +15083,15 @@ public final class AdminProtos { * Protobuf type {@code hbase.pb.UpdateFavoredNodesResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.UpdateFavoredNodesResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -14464,18 +15104,15 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); response_ = 0; @@ -14483,10 +15120,6 @@ public final class AdminProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateFavoredNodesResponse_descriptor; @@ -14517,6 +15150,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) 
super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse)other); @@ -14531,7 +15190,8 @@ public final class AdminProtos { if (other.hasResponse()) { setResponse(other.getResponse()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -14548,7 +15208,7 @@ public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -14558,7 +15218,6 @@ public final class AdminProtos { } private int bitField0_; - // optional uint32 response = 1; private int response_ ; /** * optional uint32 response = 1; @@ -14590,22 +15249,59 @@ public final class AdminProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.UpdateFavoredNodesResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.UpdateFavoredNodesResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse DEFAULT_INSTANCE; static { - defaultInstance = new UpdateFavoredNodesResponse(true); - defaultInstance.initFields(); + 
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public UpdateFavoredNodesResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new UpdateFavoredNodesResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.UpdateFavoredNodesResponse) } - public interface MergeRegionsRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface MergeRegionsRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.MergeRegionsRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionSpecifier region_a = 1; /** * required .hbase.pb.RegionSpecifier region_a = 1; */ @@ -14619,7 +15315,6 @@ public final class AdminProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionAOrBuilder(); - // required .hbase.pb.RegionSpecifier region_b = 2; /** * required .hbase.pb.RegionSpecifier region_b = 2; */ @@ -14633,7 +15328,6 @@ public final class AdminProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionBOrBuilder(); - // optional bool forcible = 3 
[default = false]; /** * optional bool forcible = 3 [default = false]; */ @@ -14643,64 +15337,56 @@ public final class AdminProtos { */ boolean getForcible(); - // optional uint64 master_system_time = 4; /** - * optional uint64 master_system_time = 4; - * *
      * wall clock time from master
      * 
+ * + * optional uint64 master_system_time = 4; */ boolean hasMasterSystemTime(); /** - * optional uint64 master_system_time = 4; - * *
      * wall clock time from master
      * 
+ * + * optional uint64 master_system_time = 4; */ long getMasterSystemTime(); } /** - * Protobuf type {@code hbase.pb.MergeRegionsRequest} - * *
    **
    * Merges the specified regions.
    * <p>
    * This method currently closes the regions and then merges them
    * 
+ * + * Protobuf type {@code hbase.pb.MergeRegionsRequest} */ - public static final class MergeRegionsRequest extends - com.google.protobuf.GeneratedMessage - implements MergeRegionsRequestOrBuilder { + public static final class MergeRegionsRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.MergeRegionsRequest) + MergeRegionsRequestOrBuilder { // Use MergeRegionsRequest.newBuilder() to construct. - private MergeRegionsRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private MergeRegionsRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private MergeRegionsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final MergeRegionsRequest defaultInstance; - public static MergeRegionsRequest getDefaultInstance() { - return defaultInstance; } - - public MergeRegionsRequest getDefaultInstanceForType() { - return defaultInstance; + private MergeRegionsRequest() { + forcible_ = false; + masterSystemTime_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private MergeRegionsRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -14761,7 +15447,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + 
e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -14772,30 +15458,14 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_MergeRegionsRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_MergeRegionsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public MergeRegionsRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new MergeRegionsRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.RegionSpecifier region_a = 1; public static final int REGION_A_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier regionA_; /** @@ -14808,16 +15478,15 @@ public final class AdminProtos { * required .hbase.pb.RegionSpecifier region_a = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegionA() { - return regionA_; + return regionA_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : regionA_; } /** * required .hbase.pb.RegionSpecifier region_a = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionAOrBuilder() { - return regionA_; + return regionA_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : regionA_; } - // required .hbase.pb.RegionSpecifier region_b = 2; public static final int REGION_B_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier regionB_; /** @@ -14830,16 +15499,15 @@ public final class AdminProtos { * required .hbase.pb.RegionSpecifier region_b = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegionB() { - return regionB_; + return regionB_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : regionB_; } /** * required .hbase.pb.RegionSpecifier region_b = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionBOrBuilder() { - return regionB_; + return regionB_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : regionB_; } - // optional bool forcible = 3 [default = false]; public static final int FORCIBLE_FIELD_NUMBER = 3; private boolean forcible_; /** @@ -14855,40 +15523,34 @@ public final class AdminProtos { return forcible_; } - // optional uint64 master_system_time = 4; public static final int MASTER_SYSTEM_TIME_FIELD_NUMBER = 4; private long masterSystemTime_; /** - * optional uint64 master_system_time = 4; - * *
      * wall clock time from master
      * 
+ * + * optional uint64 master_system_time = 4; */ public boolean hasMasterSystemTime() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** - * optional uint64 master_system_time = 4; - * *
      * wall clock time from master
      * 
+ * + * optional uint64 master_system_time = 4; */ public long getMasterSystemTime() { return masterSystemTime_; } - private void initFields() { - regionA_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - regionB_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - forcible_ = false; - masterSystemTime_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRegionA()) { memoizedIsInitialized = 0; @@ -14912,12 +15574,11 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, regionA_); + output.writeMessage(1, getRegionA()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, regionB_); + output.writeMessage(2, getRegionB()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBool(3, forcible_); @@ -14925,22 +15586,21 @@ public final class AdminProtos { if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeUInt64(4, masterSystemTime_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, regionA_); + .computeMessageSize(1, getRegionA()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, regionB_); + .computeMessageSize(2, getRegionB()); } if 
(((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream @@ -14950,19 +15610,13 @@ public final class AdminProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(4, masterSystemTime_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -14993,12 +15647,10 @@ public final class AdminProtos { result = result && (getMasterSystemTime() == other.getMasterSystemTime()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -15016,13 +15668,15 @@ public final class AdminProtos { } if (hasForcible()) { hash = (37 * hash) + FORCIBLE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getForcible()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getForcible()); } if (hasMasterSystemTime()) { hash = (37 * hash) + MASTER_SYSTEM_TIME_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getMasterSystemTime()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getMasterSystemTime()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -15050,68 +15704,80 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + 
return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder 
newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.MergeRegionsRequest} - * *
      **
      * Merges the specified regions.
      * <p>
      * This method currently closes the regions and then merges them
      * 
+ * + * Protobuf type {@code hbase.pb.MergeRegionsRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.MergeRegionsRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_MergeRegionsRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_MergeRegionsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -15124,30 +15790,27 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionAFieldBuilder(); getRegionBFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (regionABuilder_ == null) { - regionA_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + regionA_ = null; } else { regionABuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (regionBBuilder_ == null) { - regionB_ = 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + regionB_ = null; } else { regionBBuilder_.clear(); } @@ -15159,10 +15822,6 @@ public final class AdminProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_MergeRegionsRequest_descriptor; @@ -15213,6 +15872,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsRequest)other); @@ -15236,25 +15921,22 @@ public final class AdminProtos { if (other.hasMasterSystemTime()) { setMasterSystemTime(other.getMasterSystemTime()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final 
boolean isInitialized() { if (!hasRegionA()) { - return false; } if (!hasRegionB()) { - return false; } if (!getRegionA().isInitialized()) { - return false; } if (!getRegionB().isInitialized()) { - return false; } return true; @@ -15269,7 +15951,7 @@ public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -15279,9 +15961,8 @@ public final class AdminProtos { } private int bitField0_; - // required .hbase.pb.RegionSpecifier region_a = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier regionA_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier regionA_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionABuilder_; /** * required .hbase.pb.RegionSpecifier region_a = 1; @@ -15294,7 +15975,7 @@ public final class AdminProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegionA() { if (regionABuilder_ == null) { - return regionA_; + return regionA_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : regionA_; } else { return regionABuilder_.getMessage(); } @@ -15335,6 +16016,7 @@ public final class AdminProtos { public Builder mergeRegionA(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionABuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + regionA_ != null && regionA_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { regionA_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(regionA_).mergeFrom(value).buildPartial(); @@ -15353,7 +16035,7 @@ public final class AdminProtos { */ public Builder clearRegionA() { if (regionABuilder_ == null) { - regionA_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + regionA_ = null; onChanged(); } else { regionABuilder_.clear(); @@ -15376,19 +16058,20 @@ public final class AdminProtos { if (regionABuilder_ != null) { return regionABuilder_.getMessageOrBuilder(); } else { - return regionA_; + return regionA_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : regionA_; } } /** * required .hbase.pb.RegionSpecifier region_a = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionAFieldBuilder() { if (regionABuilder_ == null) { - regionABuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionABuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - regionA_, + getRegionA(), getParentForChildren(), isClean()); regionA_ = null; @@ -15396,9 +16079,8 @@ public final class AdminProtos { return regionABuilder_; } - // required .hbase.pb.RegionSpecifier region_b = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier regionB_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier regionB_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBBuilder_; /** * required .hbase.pb.RegionSpecifier region_b = 2; @@ -15411,7 +16093,7 @@ public final class AdminProtos { */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegionB() { if (regionBBuilder_ == null) { - return regionB_; + return regionB_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : regionB_; } else { return regionBBuilder_.getMessage(); } @@ -15452,6 +16134,7 @@ public final class AdminProtos { public Builder mergeRegionB(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + regionB_ != null && regionB_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { regionB_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(regionB_).mergeFrom(value).buildPartial(); @@ -15470,7 +16153,7 @@ public final class AdminProtos { */ public Builder clearRegionB() { if (regionBBuilder_ == null) { - regionB_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + regionB_ = null; onChanged(); } else { regionBBuilder_.clear(); @@ -15493,19 +16176,20 @@ public final class AdminProtos { if (regionBBuilder_ != null) { return regionBBuilder_.getMessageOrBuilder(); } else { - return regionB_; + return regionB_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : regionB_; } } /** * required .hbase.pb.RegionSpecifier region_b = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionBFieldBuilder() { if (regionBBuilder_ == null) { - regionBBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionBBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - regionB_, + getRegionB(), getParentForChildren(), isClean()); regionB_ = null; @@ -15513,7 +16197,6 @@ public final class AdminProtos { return regionBBuilder_; } - // optional bool forcible = 3 [default = false]; private boolean forcible_ ; /** * optional bool forcible = 3 [default = false]; @@ -15546,34 +16229,33 @@ public final class AdminProtos { return this; } - // optional uint64 master_system_time = 4; private long masterSystemTime_ ; /** - * optional uint64 master_system_time = 4; - * *
        * wall clock time from master
        * 
+ * + * optional uint64 master_system_time = 4; */ public boolean hasMasterSystemTime() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** - * optional uint64 master_system_time = 4; - * *
        * wall clock time from master
        * 
+ * + * optional uint64 master_system_time = 4; */ public long getMasterSystemTime() { return masterSystemTime_; } /** - * optional uint64 master_system_time = 4; - * *
        * wall clock time from master
        * 
+ * + * optional uint64 master_system_time = 4; */ public Builder setMasterSystemTime(long value) { bitField0_ |= 0x00000008; @@ -15582,11 +16264,11 @@ public final class AdminProtos { return this; } /** - * optional uint64 master_system_time = 4; - * *
        * wall clock time from master
        * 
+ * + * optional uint64 master_system_time = 4; */ public Builder clearMasterSystemTime() { bitField0_ = (bitField0_ & ~0x00000008); @@ -15594,54 +16276,83 @@ public final class AdminProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.MergeRegionsRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.MergeRegionsRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsRequest DEFAULT_INSTANCE; static { - defaultInstance = new MergeRegionsRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public MergeRegionsRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MergeRegionsRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.MergeRegionsRequest) } - public 
interface MergeRegionsResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface MergeRegionsResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.MergeRegionsResponse) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.MergeRegionsResponse} */ - public static final class MergeRegionsResponse extends - com.google.protobuf.GeneratedMessage - implements MergeRegionsResponseOrBuilder { + public static final class MergeRegionsResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.MergeRegionsResponse) + MergeRegionsResponseOrBuilder { // Use MergeRegionsResponse.newBuilder() to construct. - private MergeRegionsResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private MergeRegionsResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private MergeRegionsResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final MergeRegionsResponse defaultInstance; - public static MergeRegionsResponse getDefaultInstance() { - return defaultInstance; } - - public MergeRegionsResponse getDefaultInstanceForType() { - return defaultInstance; + private MergeRegionsResponse() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private MergeRegionsResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -15665,7 +16376,7 @@ public final class 
AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -15676,34 +16387,18 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_MergeRegionsResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_MergeRegionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public MergeRegionsResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new MergeRegionsResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -15711,29 +16406,21 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException 
{ - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -15744,12 +16431,10 @@ public final class AdminProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsResponse) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -15757,7 +16442,7 @@ public final class AdminProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -15785,46 +16470,57 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsResponse parseFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder 
newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -15832,14 +16528,15 @@ public final class AdminProtos { * Protobuf type {@code hbase.pb.MergeRegionsResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.MergeRegionsResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_MergeRegionsResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_MergeRegionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -15852,27 +16549,20 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private 
void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_MergeRegionsResponse_descriptor; @@ -15896,6 +16586,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsResponse)other); @@ -15907,7 +16623,8 @@ public final class AdminProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsResponse other) { if (other == 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -15924,7 +16641,7 @@ public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -15932,22 +16649,59 @@ public final class AdminProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.MergeRegionsResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.MergeRegionsResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsResponse DEFAULT_INSTANCE; static { - defaultInstance = new MergeRegionsResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public MergeRegionsResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return new MergeRegionsResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.MergeRegionsResponse) } - public interface WALEntryOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface WALEntryOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.WALEntry) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.WALKey key = 1; /** * required .hbase.pb.WALKey key = 1; */ @@ -15961,93 +16715,84 @@ public final class AdminProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKeyOrBuilder getKeyOrBuilder(); - // repeated bytes key_value_bytes = 2; /** - * repeated bytes key_value_bytes = 2; - * *
      * Following may be null if the KVs/Cells are carried along the side in a cellblock (See
      * RPC for more on cellblocks). If Cells/KVs are in a cellblock, this next field is null
      * and associated_cell_count has count of Cells associated w/ this WALEntry
      * 
+ * + * repeated bytes key_value_bytes = 2; */ java.util.List getKeyValueBytesList(); /** - * repeated bytes key_value_bytes = 2; - * *
      * Following may be null if the KVs/Cells are carried along the side in a cellblock (See
      * RPC for more on cellblocks). If Cells/KVs are in a cellblock, this next field is null
      * and associated_cell_count has count of Cells associated w/ this WALEntry
      * 
+ * + * repeated bytes key_value_bytes = 2; */ int getKeyValueBytesCount(); /** - * repeated bytes key_value_bytes = 2; - * *
      * Following may be null if the KVs/Cells are carried along the side in a cellblock (See
      * RPC for more on cellblocks). If Cells/KVs are in a cellblock, this next field is null
      * and associated_cell_count has count of Cells associated w/ this WALEntry
      * 
+ * + * repeated bytes key_value_bytes = 2; */ com.google.protobuf.ByteString getKeyValueBytes(int index); - // optional int32 associated_cell_count = 3; /** - * optional int32 associated_cell_count = 3; - * *
      * If Cell data is carried alongside in a cellblock, this is count of Cells in the cellblock.
      * 
+ * + * optional int32 associated_cell_count = 3; */ boolean hasAssociatedCellCount(); /** - * optional int32 associated_cell_count = 3; - * *
      * If Cell data is carried alongside in a cellblock, this is count of Cells in the cellblock.
      * 
+ * + * optional int32 associated_cell_count = 3; */ int getAssociatedCellCount(); } /** - * Protobuf type {@code hbase.pb.WALEntry} - * *
    * Protocol buffer version of WAL for replication
    * 
+ * + * Protobuf type {@code hbase.pb.WALEntry} */ - public static final class WALEntry extends - com.google.protobuf.GeneratedMessage - implements WALEntryOrBuilder { + public static final class WALEntry extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.WALEntry) + WALEntryOrBuilder { // Use WALEntry.newBuilder() to construct. - private WALEntry(com.google.protobuf.GeneratedMessage.Builder builder) { + private WALEntry(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private WALEntry(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final WALEntry defaultInstance; - public static WALEntry getDefaultInstance() { - return defaultInstance; - } - - public WALEntry getDefaultInstanceForType() { - return defaultInstance; + private WALEntry() { + keyValueBytes_ = java.util.Collections.emptyList(); + associatedCellCount_ = 0; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private WALEntry( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -16098,7 +16843,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { keyValueBytes_ = 
java.util.Collections.unmodifiableList(keyValueBytes_); @@ -16112,30 +16857,14 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_WALEntry_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_WALEntry_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public WALEntry parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new WALEntry(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.WALKey key = 1; public static final int KEY_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey key_; /** @@ -16148,89 +16877,83 @@ public final class AdminProtos { * required .hbase.pb.WALKey key = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey getKey() { - return key_; + return key_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.getDefaultInstance() : key_; } /** * required .hbase.pb.WALKey key = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKeyOrBuilder getKeyOrBuilder() { - return key_; + return key_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.getDefaultInstance() : key_; } - // repeated bytes key_value_bytes = 2; public static final int KEY_VALUE_BYTES_FIELD_NUMBER = 2; private java.util.List keyValueBytes_; /** - * repeated bytes key_value_bytes = 2; - * *
      * Following may be null if the KVs/Cells are carried along the side in a cellblock (See
      * RPC for more on cellblocks). If Cells/KVs are in a cellblock, this next field is null
      * and associated_cell_count has count of Cells associated w/ this WALEntry
      * 
+ * + * repeated bytes key_value_bytes = 2; */ public java.util.List getKeyValueBytesList() { return keyValueBytes_; } /** - * repeated bytes key_value_bytes = 2; - * *
      * Following may be null if the KVs/Cells are carried along the side in a cellblock (See
      * RPC for more on cellblocks). If Cells/KVs are in a cellblock, this next field is null
      * and associated_cell_count has count of Cells associated w/ this WALEntry
      * 
+ * + * repeated bytes key_value_bytes = 2; */ public int getKeyValueBytesCount() { return keyValueBytes_.size(); } /** - * repeated bytes key_value_bytes = 2; - * *
      * Following may be null if the KVs/Cells are carried along the side in a cellblock (See
      * RPC for more on cellblocks). If Cells/KVs are in a cellblock, this next field is null
      * and associated_cell_count has count of Cells associated w/ this WALEntry
      * 
+ * + * repeated bytes key_value_bytes = 2; */ public com.google.protobuf.ByteString getKeyValueBytes(int index) { return keyValueBytes_.get(index); } - // optional int32 associated_cell_count = 3; public static final int ASSOCIATED_CELL_COUNT_FIELD_NUMBER = 3; private int associatedCellCount_; /** - * optional int32 associated_cell_count = 3; - * *
      * If Cell data is carried alongside in a cellblock, this is count of Cells in the cellblock.
      * 
+ * + * optional int32 associated_cell_count = 3; */ public boolean hasAssociatedCellCount() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional int32 associated_cell_count = 3; - * *
      * If Cell data is carried alongside in a cellblock, this is count of Cells in the cellblock.
      * 
+ * + * optional int32 associated_cell_count = 3; */ public int getAssociatedCellCount() { return associatedCellCount_; } - private void initFields() { - key_ = org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.getDefaultInstance(); - keyValueBytes_ = java.util.Collections.emptyList(); - associatedCellCount_ = 0; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasKey()) { memoizedIsInitialized = 0; @@ -16246,9 +16969,8 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, key_); + output.writeMessage(1, getKey()); } for (int i = 0; i < keyValueBytes_.size(); i++) { output.writeBytes(2, keyValueBytes_.get(i)); @@ -16256,18 +16978,17 @@ public final class AdminProtos { if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeInt32(3, associatedCellCount_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, key_); + .computeMessageSize(1, getKey()); } { int dataSize = 0; @@ -16282,19 +17003,13 @@ public final class AdminProtos { size += com.google.protobuf.CodedOutputStream .computeInt32Size(3, associatedCellCount_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; 
@java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -16317,12 +17032,10 @@ public final class AdminProtos { result = result && (getAssociatedCellCount() == other.getAssociatedCellCount()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -16342,7 +17055,7 @@ public final class AdminProtos { hash = (37 * hash) + ASSOCIATED_CELL_COUNT_FIELD_NUMBER; hash = (53 * hash) + getAssociatedCellCount(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -16370,65 +17083,77 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.WALEntry} - * *
      * Protocol buffer version of WAL for replication
      * 
+ * + * Protobuf type {@code hbase.pb.WALEntry} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntryOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.WALEntry) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntryOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_WALEntry_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_WALEntry_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -16441,23 +17166,20 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getKeyFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (keyBuilder_ == null) { - key_ = org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.getDefaultInstance(); + key_ = null; } else { keyBuilder_.clear(); } @@ -16469,10 +17191,6 @@ public final class AdminProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_WALEntry_descriptor; @@ -16516,6 +17234,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry)other); @@ -16543,17 +17287,16 @@ public final class AdminProtos { if (other.hasAssociatedCellCount()) { setAssociatedCellCount(other.getAssociatedCellCount()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasKey()) { - return false; } if (!getKey().isInitialized()) { - return false; } return true; @@ -16568,7 +17311,7 @@ public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry) 
e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -16578,9 +17321,8 @@ public final class AdminProtos { } private int bitField0_; - // required .hbase.pb.WALKey key = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey key_ = org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey key_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKeyOrBuilder> keyBuilder_; /** * required .hbase.pb.WALKey key = 1; @@ -16593,7 +17335,7 @@ public final class AdminProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey getKey() { if (keyBuilder_ == null) { - return key_; + return key_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.getDefaultInstance() : key_; } else { return keyBuilder_.getMessage(); } @@ -16634,6 +17376,7 @@ public final class AdminProtos { public Builder mergeKey(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey value) { if (keyBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + key_ != null && key_ != org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.getDefaultInstance()) { key_ = org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.newBuilder(key_).mergeFrom(value).buildPartial(); @@ -16652,7 +17395,7 @@ public final class AdminProtos { */ public Builder clearKey() { if (keyBuilder_ == null) { - key_ = org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.getDefaultInstance(); + key_ = null; onChanged(); } else { keyBuilder_.clear(); @@ -16675,19 +17418,20 @@ public final class AdminProtos { if (keyBuilder_ != null) { return keyBuilder_.getMessageOrBuilder(); } else { - return key_; + return key_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.getDefaultInstance() : key_; } } /** * required .hbase.pb.WALKey key = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKeyOrBuilder> getKeyFieldBuilder() { if (keyBuilder_ == null) { - keyBuilder_ = new com.google.protobuf.SingleFieldBuilder< + keyBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKeyOrBuilder>( - key_, + getKey(), getParentForChildren(), isClean()); key_ = null; @@ -16695,7 +17439,6 @@ public final class AdminProtos { return keyBuilder_; } - // repeated bytes key_value_bytes = 2; private java.util.List keyValueBytes_ = java.util.Collections.emptyList(); private void ensureKeyValueBytesIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { @@ -16704,50 +17447,50 @@ public final class AdminProtos { } } /** - * repeated bytes key_value_bytes = 2; - * *
        * Following may be null if the KVs/Cells are carried along the side in a cellblock (See
        * RPC for more on cellblocks). If Cells/KVs are in a cellblock, this next field is null
        * and associated_cell_count has count of Cells associated w/ this WALEntry
        * 
+ * + * repeated bytes key_value_bytes = 2; */ public java.util.List getKeyValueBytesList() { return java.util.Collections.unmodifiableList(keyValueBytes_); } /** - * repeated bytes key_value_bytes = 2; - * *
        * Following may be null if the KVs/Cells are carried along the side in a cellblock (See
        * RPC for more on cellblocks). If Cells/KVs are in a cellblock, this next field is null
        * and associated_cell_count has count of Cells associated w/ this WALEntry
        * 
+ * + * repeated bytes key_value_bytes = 2; */ public int getKeyValueBytesCount() { return keyValueBytes_.size(); } /** - * repeated bytes key_value_bytes = 2; - * *
        * Following may be null if the KVs/Cells are carried along the side in a cellblock (See
        * RPC for more on cellblocks). If Cells/KVs are in a cellblock, this next field is null
        * and associated_cell_count has count of Cells associated w/ this WALEntry
        * 
+ * + * repeated bytes key_value_bytes = 2; */ public com.google.protobuf.ByteString getKeyValueBytes(int index) { return keyValueBytes_.get(index); } /** - * repeated bytes key_value_bytes = 2; - * *
        * Following may be null if the KVs/Cells are carried along the side in a cellblock (See
        * RPC for more on cellblocks). If Cells/KVs are in a cellblock, this next field is null
        * and associated_cell_count has count of Cells associated w/ this WALEntry
        * 
+ * + * repeated bytes key_value_bytes = 2; */ public Builder setKeyValueBytes( int index, com.google.protobuf.ByteString value) { @@ -16760,13 +17503,13 @@ public final class AdminProtos { return this; } /** - * repeated bytes key_value_bytes = 2; - * *
        * Following may be null if the KVs/Cells are carried along the side in a cellblock (See
        * RPC for more on cellblocks). If Cells/KVs are in a cellblock, this next field is null
        * and associated_cell_count has count of Cells associated w/ this WALEntry
        * 
+ * + * repeated bytes key_value_bytes = 2; */ public Builder addKeyValueBytes(com.google.protobuf.ByteString value) { if (value == null) { @@ -16778,29 +17521,30 @@ public final class AdminProtos { return this; } /** - * repeated bytes key_value_bytes = 2; - * *
        * Following may be null if the KVs/Cells are carried along the side in a cellblock (See
        * RPC for more on cellblocks). If Cells/KVs are in a cellblock, this next field is null
        * and associated_cell_count has count of Cells associated w/ this WALEntry
        * 
+ * + * repeated bytes key_value_bytes = 2; */ public Builder addAllKeyValueBytes( java.lang.Iterable values) { ensureKeyValueBytesIsMutable(); - super.addAll(values, keyValueBytes_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, keyValueBytes_); onChanged(); return this; } /** - * repeated bytes key_value_bytes = 2; - * *
        * Following may be null if the KVs/Cells are carried along the side in a cellblock (See
        * RPC for more on cellblocks). If Cells/KVs are in a cellblock, this next field is null
        * and associated_cell_count has count of Cells associated w/ this WALEntry
        * 
+ * + * repeated bytes key_value_bytes = 2; */ public Builder clearKeyValueBytes() { keyValueBytes_ = java.util.Collections.emptyList(); @@ -16809,34 +17553,33 @@ public final class AdminProtos { return this; } - // optional int32 associated_cell_count = 3; private int associatedCellCount_ ; /** - * optional int32 associated_cell_count = 3; - * *
        * If Cell data is carried alongside in a cellblock, this is count of Cells in the cellblock.
        * 
+ * + * optional int32 associated_cell_count = 3; */ public boolean hasAssociatedCellCount() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * optional int32 associated_cell_count = 3; - * *
        * If Cell data is carried alongside in a cellblock, this is count of Cells in the cellblock.
        * 
+ * + * optional int32 associated_cell_count = 3; */ public int getAssociatedCellCount() { return associatedCellCount_; } /** - * optional int32 associated_cell_count = 3; - * *
        * If Cell data is carried alongside in a cellblock, this is count of Cells in the cellblock.
        * 
+ * + * optional int32 associated_cell_count = 3; */ public Builder setAssociatedCellCount(int value) { bitField0_ |= 0x00000004; @@ -16845,11 +17588,11 @@ public final class AdminProtos { return this; } /** - * optional int32 associated_cell_count = 3; - * *
        * If Cell data is carried alongside in a cellblock, this is count of Cells in the cellblock.
        * 
+ * + * optional int32 associated_cell_count = 3; */ public Builder clearAssociatedCellCount() { bitField0_ = (bitField0_ & ~0x00000004); @@ -16857,22 +17600,59 @@ public final class AdminProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.WALEntry) } + // @@protoc_insertion_point(class_scope:hbase.pb.WALEntry) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry DEFAULT_INSTANCE; static { - defaultInstance = new WALEntry(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public WALEntry parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new WALEntry(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.WALEntry) } - public interface ReplicateWALEntryRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface 
ReplicateWALEntryRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ReplicateWALEntryRequest) + com.google.protobuf.MessageOrBuilder { - // repeated .hbase.pb.WALEntry entry = 1; /** * repeated .hbase.pb.WALEntry entry = 1; */ @@ -16897,7 +17677,6 @@ public final class AdminProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntryOrBuilder getEntryOrBuilder( int index); - // optional string replicationClusterId = 2; /** * optional string replicationClusterId = 2; */ @@ -16912,7 +17691,6 @@ public final class AdminProtos { com.google.protobuf.ByteString getReplicationClusterIdBytes(); - // optional string sourceBaseNamespaceDirPath = 3; /** * optional string sourceBaseNamespaceDirPath = 3; */ @@ -16927,7 +17705,6 @@ public final class AdminProtos { com.google.protobuf.ByteString getSourceBaseNamespaceDirPathBytes(); - // optional string sourceHFileArchiveDirPath = 4; /** * optional string sourceHFileArchiveDirPath = 4; */ @@ -16943,45 +17720,40 @@ public final class AdminProtos { getSourceHFileArchiveDirPathBytes(); } /** - * Protobuf type {@code hbase.pb.ReplicateWALEntryRequest} - * *
    **
    * Replicates the given entries. The guarantee is that the given entries
    * will be durable on the slave cluster if this method returns without
    * any exception.  hbase.replication has to be set to true for this to work.
    * 
+ * + * Protobuf type {@code hbase.pb.ReplicateWALEntryRequest} */ - public static final class ReplicateWALEntryRequest extends - com.google.protobuf.GeneratedMessage - implements ReplicateWALEntryRequestOrBuilder { + public static final class ReplicateWALEntryRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ReplicateWALEntryRequest) + ReplicateWALEntryRequestOrBuilder { // Use ReplicateWALEntryRequest.newBuilder() to construct. - private ReplicateWALEntryRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private ReplicateWALEntryRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ReplicateWALEntryRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ReplicateWALEntryRequest defaultInstance; - public static ReplicateWALEntryRequest getDefaultInstance() { - return defaultInstance; } - - public ReplicateWALEntryRequest getDefaultInstanceForType() { - return defaultInstance; + private ReplicateWALEntryRequest() { + entry_ = java.util.Collections.emptyList(); + replicationClusterId_ = ""; + sourceBaseNamespaceDirPath_ = ""; + sourceHFileArchiveDirPath_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ReplicateWALEntryRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -17005,22 +17777,26 @@ public final class AdminProtos { entry_ = new 
java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } - entry_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry.PARSER, extensionRegistry)); + entry_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry.PARSER, extensionRegistry)); break; } case 18: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - replicationClusterId_ = input.readBytes(); + replicationClusterId_ = bs; break; } case 26: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; - sourceBaseNamespaceDirPath_ = input.readBytes(); + sourceBaseNamespaceDirPath_ = bs; break; } case 34: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000004; - sourceHFileArchiveDirPath_ = input.readBytes(); + sourceHFileArchiveDirPath_ = bs; break; } } @@ -17029,7 +17805,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { entry_ = java.util.Collections.unmodifiableList(entry_); @@ -17043,30 +17819,14 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_ReplicateWALEntryRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_ReplicateWALEntryRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.class, 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ReplicateWALEntryRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ReplicateWALEntryRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // repeated .hbase.pb.WALEntry entry = 1; public static final int ENTRY_FIELD_NUMBER = 1; private java.util.List entry_; /** @@ -17102,9 +17862,8 @@ public final class AdminProtos { return entry_.get(index); } - // optional string replicationClusterId = 2; public static final int REPLICATIONCLUSTERID_FIELD_NUMBER = 2; - private java.lang.Object replicationClusterId_; + private volatile java.lang.Object replicationClusterId_; /** * optional string replicationClusterId = 2; */ @@ -17145,9 +17904,8 @@ public final class AdminProtos { } } - // optional string sourceBaseNamespaceDirPath = 3; public static final int SOURCEBASENAMESPACEDIRPATH_FIELD_NUMBER = 3; - private java.lang.Object sourceBaseNamespaceDirPath_; + private volatile java.lang.Object sourceBaseNamespaceDirPath_; /** * optional string sourceBaseNamespaceDirPath = 3; */ @@ -17188,9 +17946,8 @@ public final class AdminProtos { } } - // optional string sourceHFileArchiveDirPath = 4; public static final int SOURCEHFILEARCHIVEDIRPATH_FIELD_NUMBER = 4; - private java.lang.Object sourceHFileArchiveDirPath_; + private volatile java.lang.Object sourceHFileArchiveDirPath_; /** * optional string sourceHFileArchiveDirPath = 4; */ @@ -17231,16 +17988,11 @@ public final class AdminProtos { } } - private void initFields() { - entry_ = java.util.Collections.emptyList(); - replicationClusterId_ = ""; - 
sourceBaseNamespaceDirPath_ = ""; - sourceHFileArchiveDirPath_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; for (int i = 0; i < getEntryCount(); i++) { if (!getEntry(i).isInitialized()) { @@ -17254,25 +18006,23 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < entry_.size(); i++) { output.writeMessage(1, entry_.get(i)); } if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(2, getReplicationClusterIdBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, replicationClusterId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(3, getSourceBaseNamespaceDirPathBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, sourceBaseNamespaceDirPath_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(4, getSourceHFileArchiveDirPathBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, sourceHFileArchiveDirPath_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -17281,30 +18031,21 @@ public final class AdminProtos { .computeMessageSize(1, entry_.get(i)); } if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getReplicationClusterIdBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, replicationClusterId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, 
getSourceBaseNamespaceDirPathBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, sourceBaseNamespaceDirPath_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(4, getSourceHFileArchiveDirPathBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, sourceHFileArchiveDirPath_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -17332,12 +18073,10 @@ public final class AdminProtos { result = result && getSourceHFileArchiveDirPath() .equals(other.getSourceHFileArchiveDirPath()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -17361,7 +18100,7 @@ public final class AdminProtos { hash = (37 * hash) + SOURCEHFILEARCHIVEDIRPATH_FIELD_NUMBER; hash = (53 * hash) + getSourceHFileArchiveDirPath().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -17389,68 +18128,80 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + 
return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.ReplicateWALEntryRequest} - * *
      **
      * Replicates the given entries. The guarantee is that the given entries
      * will be durable on the slave cluster if this method returns without
      * any exception.  hbase.replication has to be set to true for this to work.
      * 
+ * + * Protobuf type {@code hbase.pb.ReplicateWALEntryRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ReplicateWALEntryRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_ReplicateWALEntryRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_ReplicateWALEntryRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -17463,19 +18214,16 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getEntryFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (entryBuilder_ == null) { @@ -17493,10 +18241,6 @@ public final class AdminProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_ReplicateWALEntryRequest_descriptor; @@ -17544,6 +18288,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryRequest)other); @@ -17574,7 +18344,7 @@ public final class AdminProtos { entry_ = other.entry_; bitField0_ = (bitField0_ & ~0x00000001); entryBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getEntryFieldBuilder() : null; } else { entryBuilder_.addAllMessages(other.entry_); @@ -17596,14 +18366,14 @@ public final class AdminProtos { sourceHFileArchiveDirPath_ = other.sourceHFileArchiveDirPath_; onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getEntryCount(); i++) { if (!getEntry(i).isInitialized()) { - return false; } } @@ -17619,7 +18389,7 @@ public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -17629,7 +18399,6 @@ public final class AdminProtos { } private int bitField0_; - // repeated .hbase.pb.WALEntry entry = 1; private java.util.List entry_ = java.util.Collections.emptyList(); private void ensureEntryIsMutable() { @@ -17639,7 +18408,7 @@ public final class AdminProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntryOrBuilder> entryBuilder_; /** @@ -17771,7 +18540,8 @@ public final class AdminProtos { java.lang.Iterable values) { if (entryBuilder_ == null) { ensureEntryIsMutable(); - super.addAll(values, entry_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, entry_); onChanged(); } else { entryBuilder_.addAllMessages(values); @@ -17854,11 +18624,11 @@ public final class AdminProtos { getEntryBuilderList() { return 
getEntryFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntryOrBuilder> getEntryFieldBuilder() { if (entryBuilder_ == null) { - entryBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + entryBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntryOrBuilder>( entry_, ((bitField0_ & 0x00000001) == 0x00000001), @@ -17869,7 +18639,6 @@ public final class AdminProtos { return entryBuilder_; } - // optional string replicationClusterId = 2; private java.lang.Object replicationClusterId_ = ""; /** * optional string replicationClusterId = 2; @@ -17883,9 +18652,12 @@ public final class AdminProtos { public java.lang.String getReplicationClusterId() { java.lang.Object ref = replicationClusterId_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - replicationClusterId_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + replicationClusterId_ = s; + } return s; } else { return (java.lang.String) ref; @@ -17943,7 +18715,6 @@ public final class AdminProtos { return this; } - // optional string sourceBaseNamespaceDirPath = 3; private java.lang.Object sourceBaseNamespaceDirPath_ = ""; /** * optional string sourceBaseNamespaceDirPath = 3; @@ -17957,9 +18728,12 @@ public final class AdminProtos { public java.lang.String getSourceBaseNamespaceDirPath() { java.lang.Object ref = 
sourceBaseNamespaceDirPath_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - sourceBaseNamespaceDirPath_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + sourceBaseNamespaceDirPath_ = s; + } return s; } else { return (java.lang.String) ref; @@ -18017,7 +18791,6 @@ public final class AdminProtos { return this; } - // optional string sourceHFileArchiveDirPath = 4; private java.lang.Object sourceHFileArchiveDirPath_ = ""; /** * optional string sourceHFileArchiveDirPath = 4; @@ -18031,9 +18804,12 @@ public final class AdminProtos { public java.lang.String getSourceHFileArchiveDirPath() { java.lang.Object ref = sourceHFileArchiveDirPath_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - sourceHFileArchiveDirPath_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + sourceHFileArchiveDirPath_ = s; + } return s; } else { return (java.lang.String) ref; @@ -18090,54 +18866,83 @@ public final class AdminProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ReplicateWALEntryRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.ReplicateWALEntryRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryRequest DEFAULT_INSTANCE; static { - defaultInstance = new ReplicateWALEntryRequest(true); - defaultInstance.initFields(); + 
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ReplicateWALEntryRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ReplicateWALEntryRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ReplicateWALEntryRequest) } - public interface ReplicateWALEntryResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ReplicateWALEntryResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ReplicateWALEntryResponse) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.ReplicateWALEntryResponse} */ - public static final class ReplicateWALEntryResponse extends - com.google.protobuf.GeneratedMessage - implements ReplicateWALEntryResponseOrBuilder { + public static final class ReplicateWALEntryResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ReplicateWALEntryResponse) + ReplicateWALEntryResponseOrBuilder { // Use ReplicateWALEntryResponse.newBuilder() to construct. 
- private ReplicateWALEntryResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private ReplicateWALEntryResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ReplicateWALEntryResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ReplicateWALEntryResponse defaultInstance; - public static ReplicateWALEntryResponse getDefaultInstance() { - return defaultInstance; } - - public ReplicateWALEntryResponse getDefaultInstanceForType() { - return defaultInstance; + private ReplicateWALEntryResponse() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ReplicateWALEntryResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -18161,7 +18966,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -18172,34 +18977,18 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_ReplicateWALEntryResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_ReplicateWALEntryResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ReplicateWALEntryResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ReplicateWALEntryResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -18207,29 +18996,21 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - 
@java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -18240,12 +19021,10 @@ public final class AdminProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryResponse) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -18253,7 +19032,7 @@ public final class AdminProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -18281,46 +19060,57 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -18328,14 +19118,15 @@ public final class AdminProtos { * Protobuf type {@code hbase.pb.ReplicateWALEntryResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ReplicateWALEntryResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_ReplicateWALEntryResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_ReplicateWALEntryResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -18348,27 +19139,20 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - 
public Builder clear() { super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_ReplicateWALEntryResponse_descriptor; @@ -18392,6 +19176,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryResponse)other); @@ -18403,7 +19213,8 @@ public final class AdminProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryResponse other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; 
} @@ -18420,7 +19231,7 @@ public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -18428,54 +19239,83 @@ public final class AdminProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ReplicateWALEntryResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.ReplicateWALEntryResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryResponse DEFAULT_INSTANCE; static { - defaultInstance = new ReplicateWALEntryResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ReplicateWALEntryResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ReplicateWALEntryResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + 
@java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ReplicateWALEntryResponse) } - public interface RollWALWriterRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RollWALWriterRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RollWALWriterRequest) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.RollWALWriterRequest} */ - public static final class RollWALWriterRequest extends - com.google.protobuf.GeneratedMessage - implements RollWALWriterRequestOrBuilder { + public static final class RollWALWriterRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RollWALWriterRequest) + RollWALWriterRequestOrBuilder { // Use RollWALWriterRequest.newBuilder() to construct. 
- private RollWALWriterRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private RollWALWriterRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private RollWALWriterRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RollWALWriterRequest defaultInstance; - public static RollWALWriterRequest getDefaultInstance() { - return defaultInstance; + private RollWALWriterRequest() { } - public RollWALWriterRequest getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RollWALWriterRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -18499,7 +19339,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -18510,34 +19350,18 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_RollWALWriterRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_RollWALWriterRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RollWALWriterRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RollWALWriterRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -18545,29 +19369,21 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean 
equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -18578,12 +19394,10 @@ public final class AdminProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -18591,7 +19405,7 @@ public final class AdminProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -18619,46 +19433,57 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -18666,14 +19491,15 @@ public final class AdminProtos { * Protobuf type {@code hbase.pb.RollWALWriterRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RollWALWriterRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_RollWALWriterRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_RollWALWriterRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -18686,27 +19512,20 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { 
super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_RollWALWriterRequest_descriptor; @@ -18730,6 +19549,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest)other); @@ -18741,7 +19586,8 @@ public final class AdminProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -18758,7 +19604,7 @@ public final class 
AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -18766,86 +19612,114 @@ public final class AdminProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RollWALWriterRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.RollWALWriterRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest DEFAULT_INSTANCE; static { - defaultInstance = new RollWALWriterRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RollWALWriterRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RollWALWriterRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return 
PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.RollWALWriterRequest) } - public interface RollWALWriterResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RollWALWriterResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RollWALWriterResponse) + com.google.protobuf.MessageOrBuilder { - // repeated bytes region_to_flush = 1; /** - * repeated bytes region_to_flush = 1; - * *
      * A list of encoded name of regions to flush
      * 
+ * + * repeated bytes region_to_flush = 1; */ java.util.List getRegionToFlushList(); /** - * repeated bytes region_to_flush = 1; - * *
      * A list of encoded name of regions to flush
      * 
+ * + * repeated bytes region_to_flush = 1; */ int getRegionToFlushCount(); /** - * repeated bytes region_to_flush = 1; - * *
      * A list of encoded name of regions to flush
      * 
+ * + * repeated bytes region_to_flush = 1; */ com.google.protobuf.ByteString getRegionToFlush(int index); } /** - * Protobuf type {@code hbase.pb.RollWALWriterResponse} - * *
-   *
    * Roll request responses no longer include regions to flush
    * this list will always be empty when talking to a 1.0 server
    * 
+ * + * Protobuf type {@code hbase.pb.RollWALWriterResponse} */ - public static final class RollWALWriterResponse extends - com.google.protobuf.GeneratedMessage - implements RollWALWriterResponseOrBuilder { + public static final class RollWALWriterResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RollWALWriterResponse) + RollWALWriterResponseOrBuilder { // Use RollWALWriterResponse.newBuilder() to construct. - private RollWALWriterResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private RollWALWriterResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private RollWALWriterResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RollWALWriterResponse defaultInstance; - public static RollWALWriterResponse getDefaultInstance() { - return defaultInstance; - } - - public RollWALWriterResponse getDefaultInstanceForType() { - return defaultInstance; + private RollWALWriterResponse() { + regionToFlush_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RollWALWriterResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -18878,7 +19752,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - 
e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { regionToFlush_ = java.util.Collections.unmodifiableList(regionToFlush_); @@ -18892,70 +19766,52 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_RollWALWriterResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_RollWALWriterResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RollWALWriterResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RollWALWriterResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - // repeated bytes region_to_flush = 1; public static final int REGION_TO_FLUSH_FIELD_NUMBER = 1; private java.util.List regionToFlush_; /** - * repeated bytes region_to_flush = 1; - * *
      * A list of encoded name of regions to flush
      * 
+ * + * repeated bytes region_to_flush = 1; */ public java.util.List getRegionToFlushList() { return regionToFlush_; } /** - * repeated bytes region_to_flush = 1; - * *
      * A list of encoded name of regions to flush
      * 
+ * + * repeated bytes region_to_flush = 1; */ public int getRegionToFlushCount() { return regionToFlush_.size(); } /** - * repeated bytes region_to_flush = 1; - * *
      * A list of encoded name of regions to flush
      * 
+ * + * repeated bytes region_to_flush = 1; */ public com.google.protobuf.ByteString getRegionToFlush(int index) { return regionToFlush_.get(index); } - private void initFields() { - regionToFlush_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -18963,16 +19819,14 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < regionToFlush_.size(); i++) { output.writeBytes(1, regionToFlush_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -18985,19 +19839,13 @@ public final class AdminProtos { size += dataSize; size += 1 * getRegionToFlushList().size(); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -19010,12 +19858,10 @@ public final class AdminProtos { boolean result = true; result = result && getRegionToFlushList() .equals(other.getRegionToFlushList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 
0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -19027,7 +19873,7 @@ public final class AdminProtos { hash = (37 * hash) + REGION_TO_FLUSH_FIELD_NUMBER; hash = (53 * hash) + getRegionToFlushList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -19055,67 +19901,78 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( com.google.protobuf.CodedInputStream 
input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.RollWALWriterResponse} - * *
-     *
      * Roll request responses no longer include regions to flush
      * this list will always be empty when talking to a 1.0 server
      * 
+ * + * Protobuf type {@code hbase.pb.RollWALWriterResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RollWALWriterResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_RollWALWriterResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_RollWALWriterResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -19128,18 +19985,15 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); regionToFlush_ = java.util.Collections.emptyList(); @@ -19147,10 +20001,6 @@ public final class AdminProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_RollWALWriterResponse_descriptor; @@ -19180,6 +20030,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse)other); @@ -19201,7 +20077,8 @@ public final class AdminProtos { } onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -19218,7 +20095,7 @@ public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -19228,7 +20105,6 @@ public final 
class AdminProtos { } private int bitField0_; - // repeated bytes region_to_flush = 1; private java.util.List regionToFlush_ = java.util.Collections.emptyList(); private void ensureRegionToFlushIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { @@ -19237,42 +20113,42 @@ public final class AdminProtos { } } /** - * repeated bytes region_to_flush = 1; - * *
        * A list of encoded name of regions to flush
        * 
+ * + * repeated bytes region_to_flush = 1; */ public java.util.List getRegionToFlushList() { return java.util.Collections.unmodifiableList(regionToFlush_); } /** - * repeated bytes region_to_flush = 1; - * *
        * A list of encoded name of regions to flush
        * 
+ * + * repeated bytes region_to_flush = 1; */ public int getRegionToFlushCount() { return regionToFlush_.size(); } /** - * repeated bytes region_to_flush = 1; - * *
        * A list of encoded name of regions to flush
        * 
+ * + * repeated bytes region_to_flush = 1; */ public com.google.protobuf.ByteString getRegionToFlush(int index) { return regionToFlush_.get(index); } /** - * repeated bytes region_to_flush = 1; - * *
        * A list of encoded name of regions to flush
        * 
+ * + * repeated bytes region_to_flush = 1; */ public Builder setRegionToFlush( int index, com.google.protobuf.ByteString value) { @@ -19285,11 +20161,11 @@ public final class AdminProtos { return this; } /** - * repeated bytes region_to_flush = 1; - * *
        * A list of encoded name of regions to flush
        * 
+ * + * repeated bytes region_to_flush = 1; */ public Builder addRegionToFlush(com.google.protobuf.ByteString value) { if (value == null) { @@ -19301,25 +20177,26 @@ public final class AdminProtos { return this; } /** - * repeated bytes region_to_flush = 1; - * *
        * A list of encoded name of regions to flush
        * 
+ * + * repeated bytes region_to_flush = 1; */ public Builder addAllRegionToFlush( java.lang.Iterable values) { ensureRegionToFlushIsMutable(); - super.addAll(values, regionToFlush_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, regionToFlush_); onChanged(); return this; } /** - * repeated bytes region_to_flush = 1; - * *
        * A list of encoded name of regions to flush
        * 
+ * + * repeated bytes region_to_flush = 1; */ public Builder clearRegionToFlush() { regionToFlush_ = java.util.Collections.emptyList(); @@ -19327,22 +20204,59 @@ public final class AdminProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RollWALWriterResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.RollWALWriterResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse DEFAULT_INSTANCE; static { - defaultInstance = new RollWALWriterResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RollWALWriterResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RollWALWriterResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // 
@@protoc_insertion_point(class_scope:hbase.pb.RollWALWriterResponse) } - public interface StopServerRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface StopServerRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.StopServerRequest) + com.google.protobuf.MessageOrBuilder { - // required string reason = 1; /** * required string reason = 1; */ @@ -19360,36 +20274,28 @@ public final class AdminProtos { /** * Protobuf type {@code hbase.pb.StopServerRequest} */ - public static final class StopServerRequest extends - com.google.protobuf.GeneratedMessage - implements StopServerRequestOrBuilder { + public static final class StopServerRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.StopServerRequest) + StopServerRequestOrBuilder { // Use StopServerRequest.newBuilder() to construct. - private StopServerRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private StopServerRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private StopServerRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final StopServerRequest defaultInstance; - public static StopServerRequest getDefaultInstance() { - return defaultInstance; } - - public StopServerRequest getDefaultInstanceForType() { - return defaultInstance; + private StopServerRequest() { + reason_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private StopServerRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + 
this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -19409,8 +20315,9 @@ public final class AdminProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - reason_ = input.readBytes(); + reason_ = bs; break; } } @@ -19419,7 +20326,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -19430,32 +20337,16 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_StopServerRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_StopServerRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public StopServerRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new StopServerRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string reason = 1; public static final int 
REASON_FIELD_NUMBER = 1; - private java.lang.Object reason_; + private volatile java.lang.Object reason_; /** * required string reason = 1; */ @@ -19496,13 +20387,11 @@ public final class AdminProtos { } } - private void initFields() { - reason_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasReason()) { memoizedIsInitialized = 0; @@ -19514,36 +20403,27 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getReasonBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, reason_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getReasonBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, reason_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -19559,12 +20439,10 @@ public final class AdminProtos { result = result && getReason() .equals(other.getReason()); } - result = result && - 
getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -19576,7 +20454,7 @@ public final class AdminProtos { hash = (37 * hash) + REASON_FIELD_NUMBER; hash = (53 * hash) + getReason().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -19604,46 +20482,57 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -19651,14 +20540,15 @@ public final class AdminProtos { * Protobuf type {@code hbase.pb.StopServerRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.StopServerRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_StopServerRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_StopServerRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -19671,18 +20561,15 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); reason_ = 
""; @@ -19690,10 +20577,6 @@ public final class AdminProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_StopServerRequest_descriptor; @@ -19724,6 +20607,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest)other); @@ -19740,13 +20649,13 @@ public final class AdminProtos { reason_ = other.reason_; onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasReason()) { - return false; } return true; @@ -19761,7 +20670,7 @@ public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } 
catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -19771,7 +20680,6 @@ public final class AdminProtos { } private int bitField0_; - // required string reason = 1; private java.lang.Object reason_ = ""; /** * required string reason = 1; @@ -19785,9 +20693,12 @@ public final class AdminProtos { public java.lang.String getReason() { java.lang.Object ref = reason_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - reason_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + reason_ = s; + } return s; } else { return (java.lang.String) ref; @@ -19844,54 +20755,83 @@ public final class AdminProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.StopServerRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.StopServerRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest DEFAULT_INSTANCE; static { - defaultInstance = new StopServerRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + 
@java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public StopServerRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new StopServerRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.StopServerRequest) } - public interface StopServerResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface StopServerResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.StopServerResponse) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.StopServerResponse} */ - public static final class StopServerResponse extends - com.google.protobuf.GeneratedMessage - implements StopServerResponseOrBuilder { + public static final class StopServerResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.StopServerResponse) + StopServerResponseOrBuilder { // Use StopServerResponse.newBuilder() to construct. 
- private StopServerResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private StopServerResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private StopServerResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final StopServerResponse defaultInstance; - public static StopServerResponse getDefaultInstance() { - return defaultInstance; } - - public StopServerResponse getDefaultInstanceForType() { - return defaultInstance; + private StopServerResponse() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private StopServerResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -19915,7 +20855,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -19926,34 +20866,18 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_StopServerResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_StopServerResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public StopServerResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new StopServerResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -19961,29 +20885,21 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final 
java.lang.Object obj) { if (obj == this) { return true; @@ -19994,12 +20910,10 @@ public final class AdminProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -20007,7 +20921,7 @@ public final class AdminProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -20035,46 +20949,57 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse 
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -20082,14 +21007,15 @@ public final class AdminProtos { * Protobuf type {@code hbase.pb.StopServerResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.StopServerResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_StopServerResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_StopServerResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -20102,27 +21028,20 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); 
return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_StopServerResponse_descriptor; @@ -20146,6 +21065,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse)other); @@ -20157,7 +21102,8 @@ public final class AdminProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -20174,7 +21120,7 @@ public final class AdminProtos { parsedMessage = 
PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -20182,54 +21128,83 @@ public final class AdminProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.StopServerResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.StopServerResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse DEFAULT_INSTANCE; static { - defaultInstance = new StopServerResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public StopServerResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new StopServerResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.StopServerResponse) } - public interface GetServerInfoRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface GetServerInfoRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GetServerInfoRequest) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.GetServerInfoRequest} */ - public static final class GetServerInfoRequest extends - com.google.protobuf.GeneratedMessage - implements GetServerInfoRequestOrBuilder { + public static final class GetServerInfoRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetServerInfoRequest) + GetServerInfoRequestOrBuilder { // Use GetServerInfoRequest.newBuilder() to construct. - private GetServerInfoRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetServerInfoRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private GetServerInfoRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final GetServerInfoRequest defaultInstance; - public static GetServerInfoRequest getDefaultInstance() { - return defaultInstance; - } - - public GetServerInfoRequest getDefaultInstanceForType() { - return defaultInstance; + private GetServerInfoRequest() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private GetServerInfoRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -20253,7 +21228,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -20264,34 +21239,18 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetServerInfoRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetServerInfoRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetServerInfoRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetServerInfoRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return 
true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -20299,29 +21258,21 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -20332,12 +21283,10 @@ public final class AdminProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequest) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -20345,7 +21294,7 @@ public final class AdminProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -20373,46 +21322,57 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom(java.io.InputStream input) throws 
java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder 
newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -20420,14 +21380,15 @@ public final class AdminProtos { * Protobuf type {@code hbase.pb.GetServerInfoRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetServerInfoRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetServerInfoRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetServerInfoRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -20440,27 +21401,20 @@ public final class AdminProtos { } 
private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetServerInfoRequest_descriptor; @@ -20484,6 +21438,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequest)other); 
@@ -20495,7 +21475,8 @@ public final class AdminProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequest other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -20512,7 +21493,7 @@ public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -20520,22 +21501,59 @@ public final class AdminProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:hbase.pb.GetServerInfoRequest) + } + + // @@protoc_insertion_point(class_scope:hbase.pb.GetServerInfoRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequest DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GetServerInfoRequest parsePartialFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetServerInfoRequest(input, extensionRegistry); + } + }; - // @@protoc_insertion_point(builder_scope:hbase.pb.GetServerInfoRequest) + public static com.google.protobuf.Parser parser() { + return PARSER; } - static { - defaultInstance = new GetServerInfoRequest(true); - defaultInstance.initFields(); + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.GetServerInfoRequest) } - public interface ServerInfoOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ServerInfoOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ServerInfo) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.ServerName server_name = 1; /** * required .hbase.pb.ServerName server_name = 1; */ @@ -20549,7 +21567,6 @@ public final class AdminProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder(); - // optional uint32 webui_port = 2; /** * optional uint32 webui_port = 2; */ @@ -20562,36 +21579,28 @@ public final class AdminProtos { /** * Protobuf type {@code hbase.pb.ServerInfo} */ - public static final class ServerInfo extends - com.google.protobuf.GeneratedMessage - implements ServerInfoOrBuilder { + public static final class ServerInfo extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ServerInfo) + ServerInfoOrBuilder { // Use ServerInfo.newBuilder() to construct. 
- private ServerInfo(com.google.protobuf.GeneratedMessage.Builder builder) { + private ServerInfo(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ServerInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ServerInfo defaultInstance; - public static ServerInfo getDefaultInstance() { - return defaultInstance; } - - public ServerInfo getDefaultInstanceForType() { - return defaultInstance; + private ServerInfo() { + webuiPort_ = 0; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ServerInfo( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -20634,7 +21643,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -20645,30 +21654,14 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_ServerInfo_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_ServerInfo_fieldAccessorTable 
.ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ServerInfo parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ServerInfo(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.ServerName server_name = 1; public static final int SERVER_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName serverName_; /** @@ -20681,16 +21674,15 @@ public final class AdminProtos { * required .hbase.pb.ServerName server_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServerName() { - return serverName_; + return serverName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : serverName_; } /** * required .hbase.pb.ServerName server_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() { - return serverName_; + return serverName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : serverName_; } - // optional uint32 webui_port = 2; public static final int WEBUI_PORT_FIELD_NUMBER = 2; private int webuiPort_; /** @@ -20706,14 +21698,11 @@ public final class AdminProtos { return webuiPort_; } - private void initFields() { - serverName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - webuiPort_ = 0; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasServerName()) { memoizedIsInitialized = 0; @@ -20729,43 +21718,35 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, serverName_); + output.writeMessage(1, getServerName()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt32(2, webuiPort_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, serverName_); + .computeMessageSize(1, getServerName()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeUInt32Size(2, webuiPort_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected 
java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -20786,12 +21767,10 @@ public final class AdminProtos { result = result && (getWebuiPort() == other.getWebuiPort()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -20807,7 +21786,7 @@ public final class AdminProtos { hash = (37 * hash) + WEBUI_PORT_FIELD_NUMBER; hash = (53 * hash) + getWebuiPort(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -20835,46 +21814,57 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo 
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -20882,14 +21872,15 @@ public final class AdminProtos { * Protobuf type {@code hbase.pb.ServerInfo} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfoOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ServerInfo) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_ServerInfo_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_ServerInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -20902,23 +21893,20 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getServerNameFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (serverNameBuilder_ 
== null) { - serverName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + serverName_ = null; } else { serverNameBuilder_.clear(); } @@ -20928,10 +21916,6 @@ public final class AdminProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_ServerInfo_descriptor; @@ -20970,6 +21954,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo)other); @@ -20987,17 +21997,16 @@ public final class AdminProtos { if (other.hasWebuiPort()) { setWebuiPort(other.getWebuiPort()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean 
isInitialized() { if (!hasServerName()) { - return false; } if (!getServerName().isInitialized()) { - return false; } return true; @@ -21012,7 +22021,7 @@ public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -21022,9 +22031,8 @@ public final class AdminProtos { } private int bitField0_; - // required .hbase.pb.ServerName server_name = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName serverName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName serverName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverNameBuilder_; /** * required .hbase.pb.ServerName server_name = 1; @@ -21037,7 +22045,7 @@ public final class AdminProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServerName() { if (serverNameBuilder_ == null) { - return serverName_; + return serverName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : serverName_; } else { return serverNameBuilder_.getMessage(); } @@ -21078,6 +22086,7 @@ public final class AdminProtos { public Builder mergeServerName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (serverNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + serverName_ != null && serverName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { serverName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.newBuilder(serverName_).mergeFrom(value).buildPartial(); @@ -21096,7 +22105,7 @@ public final class AdminProtos { */ public Builder clearServerName() { if (serverNameBuilder_ == null) { - serverName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + serverName_ = null; onChanged(); } else { serverNameBuilder_.clear(); @@ -21119,19 +22128,20 @@ public final class AdminProtos { if (serverNameBuilder_ != null) { return serverNameBuilder_.getMessageOrBuilder(); } else { - return serverName_; + return serverName_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : serverName_; } } /** * required .hbase.pb.ServerName server_name = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getServerNameFieldBuilder() { if (serverNameBuilder_ == null) { - serverNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + serverNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( - serverName_, + getServerName(), getParentForChildren(), isClean()); serverName_ = null; @@ -21139,7 +22149,6 @@ public final class AdminProtos { return serverNameBuilder_; } - // optional uint32 webui_port = 2; private int webuiPort_ ; /** * optional uint32 webui_port = 2; @@ -21171,22 +22180,59 @@ public final class AdminProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ServerInfo) } + // @@protoc_insertion_point(class_scope:hbase.pb.ServerInfo) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo DEFAULT_INSTANCE; static { - defaultInstance = new ServerInfo(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ServerInfo parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ServerInfo(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ServerInfo) } - public interface GetServerInfoResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface GetServerInfoResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GetServerInfoResponse) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.ServerInfo server_info = 1; /** * required .hbase.pb.ServerInfo server_info = 1; */ @@ -21203,36 +22249,27 @@ public final class AdminProtos { /** * Protobuf type {@code hbase.pb.GetServerInfoResponse} */ - public static final class GetServerInfoResponse extends - com.google.protobuf.GeneratedMessage - implements GetServerInfoResponseOrBuilder { + public static final class GetServerInfoResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetServerInfoResponse) + GetServerInfoResponseOrBuilder { // Use GetServerInfoResponse.newBuilder() to construct. 
- private GetServerInfoResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetServerInfoResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private GetServerInfoResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final GetServerInfoResponse defaultInstance; - public static GetServerInfoResponse getDefaultInstance() { - return defaultInstance; } - - public GetServerInfoResponse getDefaultInstanceForType() { - return defaultInstance; + private GetServerInfoResponse() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private GetServerInfoResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -21270,7 +22307,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -21281,30 +22318,14 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetServerInfoResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetServerInfoResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetServerInfoResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetServerInfoResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.ServerInfo server_info = 1; public static final int SERVER_INFO_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo serverInfo_; /** @@ -21317,22 +22338,20 @@ public final class AdminProtos { * required .hbase.pb.ServerInfo server_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo getServerInfo() { - return serverInfo_; + return serverInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance() : serverInfo_; } /** * required .hbase.pb.ServerInfo server_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfoOrBuilder getServerInfoOrBuilder() { - return serverInfo_; + return serverInfo_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance() : serverInfo_; } - private void initFields() { - serverInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasServerInfo()) { memoizedIsInitialized = 0; @@ -21348,36 +22367,28 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, serverInfo_); + output.writeMessage(1, getServerInfo()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, serverInfo_); + .computeMessageSize(1, getServerInfo()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -21393,12 +22404,10 @@ public final class AdminProtos { result = result && getServerInfo() .equals(other.getServerInfo()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && 
unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -21410,7 +22419,7 @@ public final class AdminProtos { hash = (37 * hash) + SERVER_INFO_FIELD_NUMBER; hash = (53 * hash) + getServerInfo().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -21438,46 +22447,57 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -21485,14 +22505,15 @@ public final class AdminProtos { * Protobuf type {@code hbase.pb.GetServerInfoResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetServerInfoResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetServerInfoResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetServerInfoResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -21505,23 +22526,20 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getServerInfoFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } 
- public Builder clear() { super.clear(); if (serverInfoBuilder_ == null) { - serverInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance(); + serverInfo_ = null; } else { serverInfoBuilder_.clear(); } @@ -21529,10 +22547,6 @@ public final class AdminProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_GetServerInfoResponse_descriptor; @@ -21567,6 +22581,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoResponse)other); @@ -21581,17 +22621,16 @@ public final class AdminProtos { if (other.hasServerInfo()) { mergeServerInfo(other.getServerInfo()); } - this.mergeUnknownFields(other.getUnknownFields()); + 
this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasServerInfo()) { - return false; } if (!getServerInfo().isInitialized()) { - return false; } return true; @@ -21606,7 +22645,7 @@ public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -21616,9 +22655,8 @@ public final class AdminProtos { } private int bitField0_; - // required .hbase.pb.ServerInfo server_info = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo serverInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo serverInfo_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfoOrBuilder> serverInfoBuilder_; /** * required .hbase.pb.ServerInfo server_info = 1; @@ -21631,7 +22669,7 @@ public final class AdminProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo getServerInfo() { if (serverInfoBuilder_ == null) { - return serverInfo_; + return serverInfo_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance() : serverInfo_; } else { return serverInfoBuilder_.getMessage(); } @@ -21672,6 +22710,7 @@ public final class AdminProtos { public Builder mergeServerInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo value) { if (serverInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + serverInfo_ != null && serverInfo_ != org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance()) { serverInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo.newBuilder(serverInfo_).mergeFrom(value).buildPartial(); @@ -21690,7 +22729,7 @@ public final class AdminProtos { */ public Builder clearServerInfo() { if (serverInfoBuilder_ == null) { - serverInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance(); + serverInfo_ = null; onChanged(); } else { serverInfoBuilder_.clear(); @@ -21713,73 +22752,103 @@ public final class AdminProtos { if (serverInfoBuilder_ != null) { return serverInfoBuilder_.getMessageOrBuilder(); } else { - return serverInfo_; + return serverInfo_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance() : serverInfo_; } } /** * required .hbase.pb.ServerInfo server_info = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfoOrBuilder> getServerInfoFieldBuilder() { if (serverInfoBuilder_ == null) { - serverInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + serverInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfoOrBuilder>( - serverInfo_, + getServerInfo(), getParentForChildren(), isClean()); serverInfo_ = null; } return serverInfoBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.GetServerInfoResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.GetServerInfoResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoResponse DEFAULT_INSTANCE; static { - defaultInstance = new GetServerInfoResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoResponse getDefaultInstance() { 
+ return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GetServerInfoResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetServerInfoResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.GetServerInfoResponse) } - public interface UpdateConfigurationRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface UpdateConfigurationRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.UpdateConfigurationRequest) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.UpdateConfigurationRequest} */ - public static final class UpdateConfigurationRequest extends - com.google.protobuf.GeneratedMessage - implements UpdateConfigurationRequestOrBuilder { + public static final class UpdateConfigurationRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.UpdateConfigurationRequest) + UpdateConfigurationRequestOrBuilder { // Use UpdateConfigurationRequest.newBuilder() to construct. 
- private UpdateConfigurationRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private UpdateConfigurationRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private UpdateConfigurationRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final UpdateConfigurationRequest defaultInstance; - public static UpdateConfigurationRequest getDefaultInstance() { - return defaultInstance; } - - public UpdateConfigurationRequest getDefaultInstanceForType() { - return defaultInstance; + private UpdateConfigurationRequest() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private UpdateConfigurationRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -21803,7 +22872,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -21814,34 +22883,18 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateConfigurationRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateConfigurationRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public UpdateConfigurationRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new UpdateConfigurationRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -21849,29 +22902,21 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - 
@java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -21882,12 +22927,10 @@ public final class AdminProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -21895,7 +22938,7 @@ public final class AdminProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -21923,46 +22966,57 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -21970,14 +23024,15 @@ public final class AdminProtos { * Protobuf type {@code hbase.pb.UpdateConfigurationRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.UpdateConfigurationRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateConfigurationRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateConfigurationRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -21990,27 +23045,20 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - 
} - public Builder clear() { super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateConfigurationRequest_descriptor; @@ -22034,6 +23082,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest)other); @@ -22045,7 +23119,8 @@ public final class AdminProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); 
return this; } @@ -22062,7 +23137,7 @@ public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -22070,54 +23145,83 @@ public final class AdminProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.UpdateConfigurationRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.UpdateConfigurationRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest DEFAULT_INSTANCE; static { - defaultInstance = new UpdateConfigurationRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public UpdateConfigurationRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new UpdateConfigurationRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + 
return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.UpdateConfigurationRequest) } - public interface UpdateConfigurationResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface UpdateConfigurationResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.UpdateConfigurationResponse) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.UpdateConfigurationResponse} */ - public static final class UpdateConfigurationResponse extends - com.google.protobuf.GeneratedMessage - implements UpdateConfigurationResponseOrBuilder { + public static final class UpdateConfigurationResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.UpdateConfigurationResponse) + UpdateConfigurationResponseOrBuilder { // Use UpdateConfigurationResponse.newBuilder() to construct. 
- private UpdateConfigurationResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private UpdateConfigurationResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private UpdateConfigurationResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final UpdateConfigurationResponse defaultInstance; - public static UpdateConfigurationResponse getDefaultInstance() { - return defaultInstance; } - - public UpdateConfigurationResponse getDefaultInstanceForType() { - return defaultInstance; + private UpdateConfigurationResponse() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private UpdateConfigurationResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -22141,7 +23245,7 @@ public final class AdminProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -22152,34 +23256,18 @@ public final class AdminProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateConfigurationResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateConfigurationResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public UpdateConfigurationResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new UpdateConfigurationResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -22187,29 +23275,21 @@ public final class AdminProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - 
@java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -22220,12 +23300,10 @@ public final class AdminProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -22233,7 +23311,7 @@ public final class AdminProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -22261,46 +23339,57 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public 
static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -22308,14 +23397,15 @@ public final class AdminProtos { * Protobuf type {@code hbase.pb.UpdateConfigurationResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.UpdateConfigurationResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateConfigurationResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateConfigurationResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -22328,27 +23418,20 @@ public final class AdminProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new 
Builder(); - } - public Builder clear() { super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.internal_static_hbase_pb_UpdateConfigurationResponse_descriptor; @@ -22372,6 +23455,32 @@ public final class AdminProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse)other); @@ -22383,7 +23492,8 @@ public final class AdminProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + 
onChanged(); return this; } @@ -22400,7 +23510,7 @@ public final class AdminProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -22408,16 +23518,53 @@ public final class AdminProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.UpdateConfigurationResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.UpdateConfigurationResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse DEFAULT_INSTANCE; static { - defaultInstance = new UpdateConfigurationResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public UpdateConfigurationResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new UpdateConfigurationResponse(input, extensionRegistry); + } + }; + + public static 
com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.UpdateConfigurationResponse) } /** @@ -23788,192 +24935,192 @@ public final class AdminProtos { // @@protoc_insertion_point(class_scope:hbase.pb.AdminService) } - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_GetRegionInfoRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_GetRegionInfoRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_GetRegionInfoResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_GetRegionInfoResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_GetStoreFileRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_GetStoreFileRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_GetStoreFileResponse_descriptor; - private static - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_GetStoreFileResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_GetOnlineRegionRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_GetOnlineRegionRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_GetOnlineRegionResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_GetOnlineRegionResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_OpenRegionRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_OpenRegionRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_OpenRegionRequest_RegionOpenInfo_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_OpenRegionRequest_RegionOpenInfo_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor 
internal_static_hbase_pb_OpenRegionResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_OpenRegionResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_WarmupRegionRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_WarmupRegionRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_WarmupRegionResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_WarmupRegionResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_CloseRegionRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_CloseRegionRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_CloseRegionResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_CloseRegionResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final 
com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FlushRegionRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_FlushRegionRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FlushRegionResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_FlushRegionResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SplitRegionRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SplitRegionRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SplitRegionResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SplitRegionResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_CompactRegionRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_CompactRegionRequest_fieldAccessorTable; - private static 
com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_CompactRegionResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_CompactRegionResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_UpdateFavoredNodesRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_UpdateFavoredNodesRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_UpdateFavoredNodesRequest_RegionUpdateInfo_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_UpdateFavoredNodesRequest_RegionUpdateInfo_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_UpdateFavoredNodesResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_UpdateFavoredNodesResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_MergeRegionsRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_MergeRegionsRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_MergeRegionsResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_MergeRegionsResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_WALEntry_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_WALEntry_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ReplicateWALEntryRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ReplicateWALEntryRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ReplicateWALEntryResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ReplicateWALEntryResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RollWALWriterRequest_descriptor; - private static - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RollWALWriterRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RollWALWriterResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RollWALWriterResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_StopServerRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_StopServerRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_StopServerResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_StopServerResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_GetServerInfoRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_GetServerInfoRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor 
internal_static_hbase_pb_ServerInfo_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ServerInfo_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_GetServerInfoResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_GetServerInfoResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_UpdateConfigurationRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_UpdateConfigurationRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_UpdateConfigurationResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_UpdateConfigurationResponse_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { @@ -24091,235 +25238,237 @@ public final class AdminProtos { "\013AdminProtosH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - 
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_hbase_pb_GetRegionInfoRequest_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_hbase_pb_GetRegionInfoRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_GetRegionInfoRequest_descriptor, - new java.lang.String[] { "Region", "CompactionState", }); - internal_static_hbase_pb_GetRegionInfoResponse_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_hbase_pb_GetRegionInfoResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_GetRegionInfoResponse_descriptor, - new java.lang.String[] { "RegionInfo", "CompactionState", "IsRecovering", }); - internal_static_hbase_pb_GetStoreFileRequest_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_hbase_pb_GetStoreFileRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_GetStoreFileRequest_descriptor, - new java.lang.String[] { "Region", "Family", }); - internal_static_hbase_pb_GetStoreFileResponse_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_hbase_pb_GetStoreFileResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_GetStoreFileResponse_descriptor, - new java.lang.String[] { "StoreFile", }); - internal_static_hbase_pb_GetOnlineRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_hbase_pb_GetOnlineRegionRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_GetOnlineRegionRequest_descriptor, - new java.lang.String[] { }); - 
internal_static_hbase_pb_GetOnlineRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_hbase_pb_GetOnlineRegionResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_GetOnlineRegionResponse_descriptor, - new java.lang.String[] { "RegionInfo", }); - internal_static_hbase_pb_OpenRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_hbase_pb_OpenRegionRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_OpenRegionRequest_descriptor, - new java.lang.String[] { "OpenInfo", "ServerStartCode", "MasterSystemTime", }); - internal_static_hbase_pb_OpenRegionRequest_RegionOpenInfo_descriptor = - internal_static_hbase_pb_OpenRegionRequest_descriptor.getNestedTypes().get(0); - internal_static_hbase_pb_OpenRegionRequest_RegionOpenInfo_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_OpenRegionRequest_RegionOpenInfo_descriptor, - new java.lang.String[] { "Region", "VersionOfOfflineNode", "FavoredNodes", "OpenForDistributedLogReplay", }); - internal_static_hbase_pb_OpenRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(7); - internal_static_hbase_pb_OpenRegionResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_OpenRegionResponse_descriptor, - new java.lang.String[] { "OpeningState", }); - internal_static_hbase_pb_WarmupRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(8); - internal_static_hbase_pb_WarmupRegionRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_WarmupRegionRequest_descriptor, - new java.lang.String[] { "RegionInfo", }); - internal_static_hbase_pb_WarmupRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(9); - 
internal_static_hbase_pb_WarmupRegionResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_WarmupRegionResponse_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_CloseRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(10); - internal_static_hbase_pb_CloseRegionRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_CloseRegionRequest_descriptor, - new java.lang.String[] { "Region", "VersionOfClosingNode", "TransitionInZK", "DestinationServer", "ServerStartCode", }); - internal_static_hbase_pb_CloseRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(11); - internal_static_hbase_pb_CloseRegionResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_CloseRegionResponse_descriptor, - new java.lang.String[] { "Closed", }); - internal_static_hbase_pb_FlushRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(12); - internal_static_hbase_pb_FlushRegionRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_FlushRegionRequest_descriptor, - new java.lang.String[] { "Region", "IfOlderThanTs", "WriteFlushWalMarker", }); - internal_static_hbase_pb_FlushRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(13); - internal_static_hbase_pb_FlushRegionResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_FlushRegionResponse_descriptor, - new java.lang.String[] { "LastFlushTime", "Flushed", "WroteFlushWalMarker", }); - internal_static_hbase_pb_SplitRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(14); - internal_static_hbase_pb_SplitRegionRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - 
internal_static_hbase_pb_SplitRegionRequest_descriptor, - new java.lang.String[] { "Region", "SplitPoint", }); - internal_static_hbase_pb_SplitRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(15); - internal_static_hbase_pb_SplitRegionResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_SplitRegionResponse_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_CompactRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(16); - internal_static_hbase_pb_CompactRegionRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_CompactRegionRequest_descriptor, - new java.lang.String[] { "Region", "Major", "Family", }); - internal_static_hbase_pb_CompactRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(17); - internal_static_hbase_pb_CompactRegionResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_CompactRegionResponse_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_UpdateFavoredNodesRequest_descriptor = - getDescriptor().getMessageTypes().get(18); - internal_static_hbase_pb_UpdateFavoredNodesRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_UpdateFavoredNodesRequest_descriptor, - new java.lang.String[] { "UpdateInfo", }); - internal_static_hbase_pb_UpdateFavoredNodesRequest_RegionUpdateInfo_descriptor = - internal_static_hbase_pb_UpdateFavoredNodesRequest_descriptor.getNestedTypes().get(0); - internal_static_hbase_pb_UpdateFavoredNodesRequest_RegionUpdateInfo_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_UpdateFavoredNodesRequest_RegionUpdateInfo_descriptor, - new java.lang.String[] { "Region", "FavoredNodes", }); - 
internal_static_hbase_pb_UpdateFavoredNodesResponse_descriptor = - getDescriptor().getMessageTypes().get(19); - internal_static_hbase_pb_UpdateFavoredNodesResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_UpdateFavoredNodesResponse_descriptor, - new java.lang.String[] { "Response", }); - internal_static_hbase_pb_MergeRegionsRequest_descriptor = - getDescriptor().getMessageTypes().get(20); - internal_static_hbase_pb_MergeRegionsRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_MergeRegionsRequest_descriptor, - new java.lang.String[] { "RegionA", "RegionB", "Forcible", "MasterSystemTime", }); - internal_static_hbase_pb_MergeRegionsResponse_descriptor = - getDescriptor().getMessageTypes().get(21); - internal_static_hbase_pb_MergeRegionsResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_MergeRegionsResponse_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_WALEntry_descriptor = - getDescriptor().getMessageTypes().get(22); - internal_static_hbase_pb_WALEntry_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_WALEntry_descriptor, - new java.lang.String[] { "Key", "KeyValueBytes", "AssociatedCellCount", }); - internal_static_hbase_pb_ReplicateWALEntryRequest_descriptor = - getDescriptor().getMessageTypes().get(23); - internal_static_hbase_pb_ReplicateWALEntryRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ReplicateWALEntryRequest_descriptor, - new java.lang.String[] { "Entry", "ReplicationClusterId", "SourceBaseNamespaceDirPath", "SourceHFileArchiveDirPath", }); - internal_static_hbase_pb_ReplicateWALEntryResponse_descriptor = - getDescriptor().getMessageTypes().get(24); - 
internal_static_hbase_pb_ReplicateWALEntryResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ReplicateWALEntryResponse_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_RollWALWriterRequest_descriptor = - getDescriptor().getMessageTypes().get(25); - internal_static_hbase_pb_RollWALWriterRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RollWALWriterRequest_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_RollWALWriterResponse_descriptor = - getDescriptor().getMessageTypes().get(26); - internal_static_hbase_pb_RollWALWriterResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RollWALWriterResponse_descriptor, - new java.lang.String[] { "RegionToFlush", }); - internal_static_hbase_pb_StopServerRequest_descriptor = - getDescriptor().getMessageTypes().get(27); - internal_static_hbase_pb_StopServerRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_StopServerRequest_descriptor, - new java.lang.String[] { "Reason", }); - internal_static_hbase_pb_StopServerResponse_descriptor = - getDescriptor().getMessageTypes().get(28); - internal_static_hbase_pb_StopServerResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_StopServerResponse_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_GetServerInfoRequest_descriptor = - getDescriptor().getMessageTypes().get(29); - internal_static_hbase_pb_GetServerInfoRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_GetServerInfoRequest_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_ServerInfo_descriptor = - getDescriptor().getMessageTypes().get(30); - 
internal_static_hbase_pb_ServerInfo_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ServerInfo_descriptor, - new java.lang.String[] { "ServerName", "WebuiPort", }); - internal_static_hbase_pb_GetServerInfoResponse_descriptor = - getDescriptor().getMessageTypes().get(31); - internal_static_hbase_pb_GetServerInfoResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_GetServerInfoResponse_descriptor, - new java.lang.String[] { "ServerInfo", }); - internal_static_hbase_pb_UpdateConfigurationRequest_descriptor = - getDescriptor().getMessageTypes().get(32); - internal_static_hbase_pb_UpdateConfigurationRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_UpdateConfigurationRequest_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_UpdateConfigurationResponse_descriptor = - getDescriptor().getMessageTypes().get(33); - internal_static_hbase_pb_UpdateConfigurationResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_UpdateConfigurationResponse_descriptor, - new java.lang.String[] { }); - return null; - } - }; + new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(), org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.getDescriptor(), }, assigner); + internal_static_hbase_pb_GetRegionInfoRequest_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_hbase_pb_GetRegionInfoRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetRegionInfoRequest_descriptor, + new java.lang.String[] { "Region", "CompactionState", }); + internal_static_hbase_pb_GetRegionInfoResponse_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_hbase_pb_GetRegionInfoResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetRegionInfoResponse_descriptor, + new java.lang.String[] { "RegionInfo", "CompactionState", "IsRecovering", }); + internal_static_hbase_pb_GetStoreFileRequest_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_hbase_pb_GetStoreFileRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetStoreFileRequest_descriptor, + new java.lang.String[] { "Region", "Family", }); + internal_static_hbase_pb_GetStoreFileResponse_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_hbase_pb_GetStoreFileResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetStoreFileResponse_descriptor, + new java.lang.String[] { "StoreFile", }); + 
internal_static_hbase_pb_GetOnlineRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_hbase_pb_GetOnlineRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetOnlineRegionRequest_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_GetOnlineRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_hbase_pb_GetOnlineRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetOnlineRegionResponse_descriptor, + new java.lang.String[] { "RegionInfo", }); + internal_static_hbase_pb_OpenRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_hbase_pb_OpenRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_OpenRegionRequest_descriptor, + new java.lang.String[] { "OpenInfo", "ServerStartCode", "MasterSystemTime", }); + internal_static_hbase_pb_OpenRegionRequest_RegionOpenInfo_descriptor = + internal_static_hbase_pb_OpenRegionRequest_descriptor.getNestedTypes().get(0); + internal_static_hbase_pb_OpenRegionRequest_RegionOpenInfo_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_OpenRegionRequest_RegionOpenInfo_descriptor, + new java.lang.String[] { "Region", "VersionOfOfflineNode", "FavoredNodes", "OpenForDistributedLogReplay", }); + internal_static_hbase_pb_OpenRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_hbase_pb_OpenRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_OpenRegionResponse_descriptor, + new java.lang.String[] { "OpeningState", }); + internal_static_hbase_pb_WarmupRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(8); + 
internal_static_hbase_pb_WarmupRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_WarmupRegionRequest_descriptor, + new java.lang.String[] { "RegionInfo", }); + internal_static_hbase_pb_WarmupRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(9); + internal_static_hbase_pb_WarmupRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_WarmupRegionResponse_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_CloseRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(10); + internal_static_hbase_pb_CloseRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_CloseRegionRequest_descriptor, + new java.lang.String[] { "Region", "VersionOfClosingNode", "TransitionInZK", "DestinationServer", "ServerStartCode", }); + internal_static_hbase_pb_CloseRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(11); + internal_static_hbase_pb_CloseRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_CloseRegionResponse_descriptor, + new java.lang.String[] { "Closed", }); + internal_static_hbase_pb_FlushRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(12); + internal_static_hbase_pb_FlushRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_FlushRegionRequest_descriptor, + new java.lang.String[] { "Region", "IfOlderThanTs", "WriteFlushWalMarker", }); + internal_static_hbase_pb_FlushRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(13); + internal_static_hbase_pb_FlushRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_FlushRegionResponse_descriptor, + new 
java.lang.String[] { "LastFlushTime", "Flushed", "WroteFlushWalMarker", }); + internal_static_hbase_pb_SplitRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(14); + internal_static_hbase_pb_SplitRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_SplitRegionRequest_descriptor, + new java.lang.String[] { "Region", "SplitPoint", }); + internal_static_hbase_pb_SplitRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(15); + internal_static_hbase_pb_SplitRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_SplitRegionResponse_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_CompactRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(16); + internal_static_hbase_pb_CompactRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_CompactRegionRequest_descriptor, + new java.lang.String[] { "Region", "Major", "Family", }); + internal_static_hbase_pb_CompactRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(17); + internal_static_hbase_pb_CompactRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_CompactRegionResponse_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_UpdateFavoredNodesRequest_descriptor = + getDescriptor().getMessageTypes().get(18); + internal_static_hbase_pb_UpdateFavoredNodesRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_UpdateFavoredNodesRequest_descriptor, + new java.lang.String[] { "UpdateInfo", }); + internal_static_hbase_pb_UpdateFavoredNodesRequest_RegionUpdateInfo_descriptor = + internal_static_hbase_pb_UpdateFavoredNodesRequest_descriptor.getNestedTypes().get(0); + 
internal_static_hbase_pb_UpdateFavoredNodesRequest_RegionUpdateInfo_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_UpdateFavoredNodesRequest_RegionUpdateInfo_descriptor, + new java.lang.String[] { "Region", "FavoredNodes", }); + internal_static_hbase_pb_UpdateFavoredNodesResponse_descriptor = + getDescriptor().getMessageTypes().get(19); + internal_static_hbase_pb_UpdateFavoredNodesResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_UpdateFavoredNodesResponse_descriptor, + new java.lang.String[] { "Response", }); + internal_static_hbase_pb_MergeRegionsRequest_descriptor = + getDescriptor().getMessageTypes().get(20); + internal_static_hbase_pb_MergeRegionsRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_MergeRegionsRequest_descriptor, + new java.lang.String[] { "RegionA", "RegionB", "Forcible", "MasterSystemTime", }); + internal_static_hbase_pb_MergeRegionsResponse_descriptor = + getDescriptor().getMessageTypes().get(21); + internal_static_hbase_pb_MergeRegionsResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_MergeRegionsResponse_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_WALEntry_descriptor = + getDescriptor().getMessageTypes().get(22); + internal_static_hbase_pb_WALEntry_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_WALEntry_descriptor, + new java.lang.String[] { "Key", "KeyValueBytes", "AssociatedCellCount", }); + internal_static_hbase_pb_ReplicateWALEntryRequest_descriptor = + getDescriptor().getMessageTypes().get(23); + internal_static_hbase_pb_ReplicateWALEntryRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + 
internal_static_hbase_pb_ReplicateWALEntryRequest_descriptor, + new java.lang.String[] { "Entry", "ReplicationClusterId", "SourceBaseNamespaceDirPath", "SourceHFileArchiveDirPath", }); + internal_static_hbase_pb_ReplicateWALEntryResponse_descriptor = + getDescriptor().getMessageTypes().get(24); + internal_static_hbase_pb_ReplicateWALEntryResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ReplicateWALEntryResponse_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_RollWALWriterRequest_descriptor = + getDescriptor().getMessageTypes().get(25); + internal_static_hbase_pb_RollWALWriterRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_RollWALWriterRequest_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_RollWALWriterResponse_descriptor = + getDescriptor().getMessageTypes().get(26); + internal_static_hbase_pb_RollWALWriterResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_RollWALWriterResponse_descriptor, + new java.lang.String[] { "RegionToFlush", }); + internal_static_hbase_pb_StopServerRequest_descriptor = + getDescriptor().getMessageTypes().get(27); + internal_static_hbase_pb_StopServerRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_StopServerRequest_descriptor, + new java.lang.String[] { "Reason", }); + internal_static_hbase_pb_StopServerResponse_descriptor = + getDescriptor().getMessageTypes().get(28); + internal_static_hbase_pb_StopServerResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_StopServerResponse_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_GetServerInfoRequest_descriptor = + getDescriptor().getMessageTypes().get(29); + 
internal_static_hbase_pb_GetServerInfoRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetServerInfoRequest_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_ServerInfo_descriptor = + getDescriptor().getMessageTypes().get(30); + internal_static_hbase_pb_ServerInfo_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ServerInfo_descriptor, + new java.lang.String[] { "ServerName", "WebuiPort", }); + internal_static_hbase_pb_GetServerInfoResponse_descriptor = + getDescriptor().getMessageTypes().get(31); + internal_static_hbase_pb_GetServerInfoResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetServerInfoResponse_descriptor, + new java.lang.String[] { "ServerInfo", }); + internal_static_hbase_pb_UpdateConfigurationRequest_descriptor = + getDescriptor().getMessageTypes().get(32); + internal_static_hbase_pb_UpdateConfigurationRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_UpdateConfigurationRequest_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_UpdateConfigurationResponse_descriptor = + getDescriptor().getMessageTypes().get(33); + internal_static_hbase_pb_UpdateConfigurationResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_UpdateConfigurationResponse_descriptor, + new java.lang.String[] { }); + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(); + org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/CellProtos.java 
b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/CellProtos.java index 5b86b1d..f049d50 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/CellProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/CellProtos.java @@ -6,46 +6,52 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated; public final class CellProtos { private CellProtos() {} public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); } /** - * Protobuf enum {@code hbase.pb.CellType} - * *
    **
    * The type of the key in a Cell
    * 
+ * + * Protobuf enum {@code hbase.pb.CellType} */ public enum CellType implements com.google.protobuf.ProtocolMessageEnum { /** * MINIMUM = 0; */ - MINIMUM(0, 0), + MINIMUM(0), /** * PUT = 4; */ - PUT(1, 4), + PUT(4), /** * DELETE = 8; */ - DELETE(2, 8), + DELETE(8), /** * DELETE_COLUMN = 12; */ - DELETE_COLUMN(3, 12), + DELETE_COLUMN(12), /** * DELETE_FAMILY = 14; */ - DELETE_FAMILY(4, 14), + DELETE_FAMILY(14), /** - * MAXIMUM = 255; - * *
      * MAXIMUM is used when searching; you look from maximum on down.
      * 
+ * + * MAXIMUM = 255; */ - MAXIMUM(5, 255), + MAXIMUM(255), ; /** @@ -69,18 +75,28 @@ public final class CellProtos { */ public static final int DELETE_FAMILY_VALUE = 14; /** - * MAXIMUM = 255; - * *
      * MAXIMUM is used when searching; you look from maximum on down.
      * 
+ * + * MAXIMUM = 255; */ public static final int MAXIMUM_VALUE = 255; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static CellType valueOf(int value) { + return forNumber(value); + } + + public static CellType forNumber(int value) { switch (value) { case 0: return MINIMUM; case 4: return PUT; @@ -96,17 +112,17 @@ public final class CellProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + CellType> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public CellType findValueByNumber(int number) { - return CellType.valueOf(number); + return CellType.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -128,21 +144,19 @@ public final class CellProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private CellType(int index, int value) { - this.index = index; + private CellType(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.CellType) } - public interface CellOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface CellOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.Cell) + com.google.protobuf.MessageOrBuilder { - // optional bytes row = 1; /** * optional bytes row = 1; */ @@ -152,7 +166,6 @@ public final class CellProtos { */ com.google.protobuf.ByteString getRow(); - // optional bytes family = 2; /** * optional bytes family = 2; */ @@ -162,7 +175,6 @@ public final class CellProtos 
{ */ com.google.protobuf.ByteString getFamily(); - // optional bytes qualifier = 3; /** * optional bytes qualifier = 3; */ @@ -172,7 +184,6 @@ public final class CellProtos { */ com.google.protobuf.ByteString getQualifier(); - // optional uint64 timestamp = 4; /** * optional uint64 timestamp = 4; */ @@ -182,7 +193,6 @@ public final class CellProtos { */ long getTimestamp(); - // optional .hbase.pb.CellType cell_type = 5; /** * optional .hbase.pb.CellType cell_type = 5; */ @@ -192,7 +202,6 @@ public final class CellProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType getCellType(); - // optional bytes value = 6; /** * optional bytes value = 6; */ @@ -202,7 +211,6 @@ public final class CellProtos { */ com.google.protobuf.ByteString getValue(); - // optional bytes tags = 7; /** * optional bytes tags = 7; */ @@ -213,43 +221,41 @@ public final class CellProtos { com.google.protobuf.ByteString getTags(); } /** - * Protobuf type {@code hbase.pb.Cell} - * *
    **
    * Protocol buffer version of Cell.
    * 
+ * + * Protobuf type {@code hbase.pb.Cell} */ - public static final class Cell extends - com.google.protobuf.GeneratedMessage - implements CellOrBuilder { + public static final class Cell extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.Cell) + CellOrBuilder { // Use Cell.newBuilder() to construct. - private Cell(com.google.protobuf.GeneratedMessage.Builder builder) { + private Cell(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private Cell(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final Cell defaultInstance; - public static Cell getDefaultInstance() { - return defaultInstance; } - - public Cell getDefaultInstanceForType() { - return defaultInstance; + private Cell() { + row_ = com.google.protobuf.ByteString.EMPTY; + family_ = com.google.protobuf.ByteString.EMPTY; + qualifier_ = com.google.protobuf.ByteString.EMPTY; + timestamp_ = 0L; + cellType_ = 0; + value_ = com.google.protobuf.ByteString.EMPTY; + tags_ = com.google.protobuf.ByteString.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private Cell( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -295,7 +301,7 @@ public final class CellProtos { unknownFields.mergeVarintField(5, rawValue); } else { bitField0_ |= 0x00000010; - cellType_ = value; + cellType_ = rawValue; } break; } @@ -315,7 +321,7 @@ public final class CellProtos { throw 
e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -326,30 +332,14 @@ public final class CellProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.internal_static_hbase_pb_Cell_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.internal_static_hbase_pb_Cell_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell.class, org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public Cell parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new Cell(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional bytes row = 1; public static final int ROW_FIELD_NUMBER = 1; private com.google.protobuf.ByteString row_; /** @@ -365,7 +355,6 @@ public final class CellProtos { return row_; } - // optional bytes family = 2; public static final int FAMILY_FIELD_NUMBER = 2; private com.google.protobuf.ByteString family_; /** @@ -381,7 +370,6 @@ public final class CellProtos { return family_; } - // optional bytes qualifier = 3; public static final int QUALIFIER_FIELD_NUMBER = 3; private com.google.protobuf.ByteString qualifier_; /** @@ -397,7 +385,6 @@ public final class 
CellProtos { return qualifier_; } - // optional uint64 timestamp = 4; public static final int TIMESTAMP_FIELD_NUMBER = 4; private long timestamp_; /** @@ -413,9 +400,8 @@ public final class CellProtos { return timestamp_; } - // optional .hbase.pb.CellType cell_type = 5; public static final int CELL_TYPE_FIELD_NUMBER = 5; - private org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType cellType_; + private int cellType_; /** * optional .hbase.pb.CellType cell_type = 5; */ @@ -426,10 +412,10 @@ public final class CellProtos { * optional .hbase.pb.CellType cell_type = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType getCellType() { - return cellType_; + org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType result = org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType.valueOf(cellType_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType.MINIMUM : result; } - // optional bytes value = 6; public static final int VALUE_FIELD_NUMBER = 6; private com.google.protobuf.ByteString value_; /** @@ -445,7 +431,6 @@ public final class CellProtos { return value_; } - // optional bytes tags = 7; public static final int TAGS_FIELD_NUMBER = 7; private com.google.protobuf.ByteString tags_; /** @@ -461,19 +446,11 @@ public final class CellProtos { return tags_; } - private void initFields() { - row_ = com.google.protobuf.ByteString.EMPTY; - family_ = com.google.protobuf.ByteString.EMPTY; - qualifier_ = com.google.protobuf.ByteString.EMPTY; - timestamp_ = 0L; - cellType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType.MINIMUM; - value_ = com.google.protobuf.ByteString.EMPTY; - tags_ = com.google.protobuf.ByteString.EMPTY; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; 
+ if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -481,7 +458,6 @@ public final class CellProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, row_); } @@ -495,7 +471,7 @@ public final class CellProtos { output.writeUInt64(4, timestamp_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeEnum(5, cellType_.getNumber()); + output.writeEnum(5, cellType_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { output.writeBytes(6, value_); @@ -503,12 +479,11 @@ public final class CellProtos { if (((bitField0_ & 0x00000040) == 0x00000040)) { output.writeBytes(7, tags_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -530,7 +505,7 @@ public final class CellProtos { } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(5, cellType_.getNumber()); + .computeEnumSize(5, cellType_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { size += com.google.protobuf.CodedOutputStream @@ -540,19 +515,13 @@ public final class CellProtos { size += com.google.protobuf.CodedOutputStream .computeBytesSize(7, tags_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -585,8 +554,7 @@ public final class CellProtos { } result = 
result && (hasCellType() == other.hasCellType()); if (hasCellType()) { - result = result && - (getCellType() == other.getCellType()); + result = result && cellType_ == other.cellType_; } result = result && (hasValue() == other.hasValue()); if (hasValue()) { @@ -598,12 +566,10 @@ public final class CellProtos { result = result && getTags() .equals(other.getTags()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -625,11 +591,12 @@ public final class CellProtos { } if (hasTimestamp()) { hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getTimestamp()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getTimestamp()); } if (hasCellType()) { hash = (37 * hash) + CELL_TYPE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getCellType()); + hash = (53 * hash) + cellType_; } if (hasValue()) { hash = (37 * hash) + VALUE_FIELD_NUMBER; @@ -639,7 +606,7 @@ public final class CellProtos { hash = (37 * hash) + TAGS_FIELD_NUMBER; hash = (53 * hash) + getTags().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -667,66 +634,78 @@ public final class CellProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.Cell} - * *
      **
      * Protocol buffer version of Cell.
      * 
+ * + * Protobuf type {@code hbase.pb.Cell} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.Cell) + org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.internal_static_hbase_pb_Cell_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.internal_static_hbase_pb_Cell_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -739,18 +718,15 @@ public final class CellProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); row_ = com.google.protobuf.ByteString.EMPTY; @@ -761,7 +737,7 @@ public final class CellProtos { bitField0_ = (bitField0_ & ~0x00000004); timestamp_ = 0L; bitField0_ = (bitField0_ & ~0x00000008); - cellType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType.MINIMUM; + cellType_ = 0; bitField0_ = (bitField0_ & ~0x00000010); value_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000020); @@ -770,10 +746,6 @@ public final class CellProtos { return 
this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.internal_static_hbase_pb_Cell_descriptor; @@ -828,6 +800,32 @@ public final class CellProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell)other); @@ -860,7 +858,8 @@ public final class CellProtos { if (other.hasTags()) { setTags(other.getTags()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -877,7 +876,7 @@ public final class CellProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell) e.getUnfinishedMessage(); - throw e; + throw 
e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -887,7 +886,6 @@ public final class CellProtos { } private int bitField0_; - // optional bytes row = 1; private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes row = 1; @@ -923,7 +921,6 @@ public final class CellProtos { return this; } - // optional bytes family = 2; private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes family = 2; @@ -959,7 +956,6 @@ public final class CellProtos { return this; } - // optional bytes qualifier = 3; private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes qualifier = 3; @@ -995,7 +991,6 @@ public final class CellProtos { return this; } - // optional uint64 timestamp = 4; private long timestamp_ ; /** * optional uint64 timestamp = 4; @@ -1028,8 +1023,7 @@ public final class CellProtos { return this; } - // optional .hbase.pb.CellType cell_type = 5; - private org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType cellType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType.MINIMUM; + private int cellType_ = 0; /** * optional .hbase.pb.CellType cell_type = 5; */ @@ -1040,7 +1034,8 @@ public final class CellProtos { * optional .hbase.pb.CellType cell_type = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType getCellType() { - return cellType_; + org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType result = org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType.valueOf(cellType_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType.MINIMUM : result; } /** * optional .hbase.pb.CellType cell_type = 5; @@ -1050,7 +1045,7 @@ public final class CellProtos { throw new NullPointerException(); } bitField0_ |= 0x00000010; - cellType_ = value; + cellType_ = value.getNumber(); onChanged(); return this; } @@ -1059,12 +1054,11 @@ public final class CellProtos { */ public Builder clearCellType() { bitField0_ = (bitField0_ & ~0x00000010); - cellType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType.MINIMUM; + cellType_ = 0; onChanged(); return this; } - // optional bytes value = 6; private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes value = 6; @@ -1100,7 +1094,6 @@ public final class CellProtos { return this; } - // optional bytes tags = 7; private com.google.protobuf.ByteString tags_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes tags = 7; @@ -1135,22 +1128,59 @@ public final class CellProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.Cell) } + // @@protoc_insertion_point(class_scope:hbase.pb.Cell) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell DEFAULT_INSTANCE; static { - defaultInstance = new Cell(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new 
com.google.protobuf.AbstractParser() { + public Cell parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Cell(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.Cell) } - public interface KeyValueOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface KeyValueOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.KeyValue) + com.google.protobuf.MessageOrBuilder { - // required bytes row = 1; /** * required bytes row = 1; */ @@ -1160,7 +1190,6 @@ public final class CellProtos { */ com.google.protobuf.ByteString getRow(); - // required bytes family = 2; /** * required bytes family = 2; */ @@ -1170,7 +1199,6 @@ public final class CellProtos { */ com.google.protobuf.ByteString getFamily(); - // required bytes qualifier = 3; /** * required bytes qualifier = 3; */ @@ -1180,7 +1208,6 @@ public final class CellProtos { */ com.google.protobuf.ByteString getQualifier(); - // optional uint64 timestamp = 4; /** * optional uint64 timestamp = 4; */ @@ -1190,7 +1217,6 @@ public final class CellProtos { */ long getTimestamp(); - // optional .hbase.pb.CellType key_type = 5; /** * optional .hbase.pb.CellType key_type = 5; */ @@ -1200,7 +1226,6 @@ public final class CellProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType getKeyType(); - // optional bytes value = 6; /** * optional bytes value = 6; */ @@ -1210,7 +1235,6 @@ public final class CellProtos { */ com.google.protobuf.ByteString getValue(); - // optional bytes tags 
= 7; /** * optional bytes tags = 7; */ @@ -1221,44 +1245,42 @@ public final class CellProtos { com.google.protobuf.ByteString getTags(); } /** - * Protobuf type {@code hbase.pb.KeyValue} - * *
    **
    * Protocol buffer version of KeyValue.
    * It doesn't have those transient parameters
    * 
+ * + * Protobuf type {@code hbase.pb.KeyValue} */ - public static final class KeyValue extends - com.google.protobuf.GeneratedMessage - implements KeyValueOrBuilder { + public static final class KeyValue extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.KeyValue) + KeyValueOrBuilder { // Use KeyValue.newBuilder() to construct. - private KeyValue(com.google.protobuf.GeneratedMessage.Builder builder) { + private KeyValue(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private KeyValue(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final KeyValue defaultInstance; - public static KeyValue getDefaultInstance() { - return defaultInstance; } - - public KeyValue getDefaultInstanceForType() { - return defaultInstance; + private KeyValue() { + row_ = com.google.protobuf.ByteString.EMPTY; + family_ = com.google.protobuf.ByteString.EMPTY; + qualifier_ = com.google.protobuf.ByteString.EMPTY; + timestamp_ = 0L; + keyType_ = 0; + value_ = com.google.protobuf.ByteString.EMPTY; + tags_ = com.google.protobuf.ByteString.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private KeyValue( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -1304,7 +1326,7 @@ public final class CellProtos { unknownFields.mergeVarintField(5, rawValue); } else { bitField0_ |= 0x00000010; - keyType_ = value; + keyType_ = rawValue; } break; } @@ 
-1324,7 +1346,7 @@ public final class CellProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -1335,30 +1357,14 @@ public final class CellProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.internal_static_hbase_pb_KeyValue_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.internal_static_hbase_pb_KeyValue_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.KeyValue.class, org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.KeyValue.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public KeyValue parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new KeyValue(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bytes row = 1; public static final int ROW_FIELD_NUMBER = 1; private com.google.protobuf.ByteString row_; /** @@ -1374,7 +1380,6 @@ public final class CellProtos { return row_; } - // required bytes family = 2; public static final int FAMILY_FIELD_NUMBER = 2; private com.google.protobuf.ByteString family_; /** @@ -1390,7 +1395,6 @@ public final class CellProtos { return family_; } - // required bytes qualifier = 3; public static final int QUALIFIER_FIELD_NUMBER = 3; private 
com.google.protobuf.ByteString qualifier_; /** @@ -1406,7 +1410,6 @@ public final class CellProtos { return qualifier_; } - // optional uint64 timestamp = 4; public static final int TIMESTAMP_FIELD_NUMBER = 4; private long timestamp_; /** @@ -1422,9 +1425,8 @@ public final class CellProtos { return timestamp_; } - // optional .hbase.pb.CellType key_type = 5; public static final int KEY_TYPE_FIELD_NUMBER = 5; - private org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType keyType_; + private int keyType_; /** * optional .hbase.pb.CellType key_type = 5; */ @@ -1435,10 +1437,10 @@ public final class CellProtos { * optional .hbase.pb.CellType key_type = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType getKeyType() { - return keyType_; + org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType result = org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType.valueOf(keyType_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType.MINIMUM : result; } - // optional bytes value = 6; public static final int VALUE_FIELD_NUMBER = 6; private com.google.protobuf.ByteString value_; /** @@ -1454,7 +1456,6 @@ public final class CellProtos { return value_; } - // optional bytes tags = 7; public static final int TAGS_FIELD_NUMBER = 7; private com.google.protobuf.ByteString tags_; /** @@ -1470,19 +1471,11 @@ public final class CellProtos { return tags_; } - private void initFields() { - row_ = com.google.protobuf.ByteString.EMPTY; - family_ = com.google.protobuf.ByteString.EMPTY; - qualifier_ = com.google.protobuf.ByteString.EMPTY; - timestamp_ = 0L; - keyType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType.MINIMUM; - value_ = com.google.protobuf.ByteString.EMPTY; - tags_ = com.google.protobuf.ByteString.EMPTY; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if 
(isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRow()) { memoizedIsInitialized = 0; @@ -1502,7 +1495,6 @@ public final class CellProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, row_); } @@ -1516,7 +1508,7 @@ public final class CellProtos { output.writeUInt64(4, timestamp_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeEnum(5, keyType_.getNumber()); + output.writeEnum(5, keyType_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { output.writeBytes(6, value_); @@ -1524,12 +1516,11 @@ public final class CellProtos { if (((bitField0_ & 0x00000040) == 0x00000040)) { output.writeBytes(7, tags_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -1551,7 +1542,7 @@ public final class CellProtos { } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(5, keyType_.getNumber()); + .computeEnumSize(5, keyType_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { size += com.google.protobuf.CodedOutputStream @@ -1561,19 +1552,13 @@ public final class CellProtos { size += com.google.protobuf.CodedOutputStream .computeBytesSize(7, tags_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { 
if (obj == this) { return true; @@ -1606,8 +1591,7 @@ public final class CellProtos { } result = result && (hasKeyType() == other.hasKeyType()); if (hasKeyType()) { - result = result && - (getKeyType() == other.getKeyType()); + result = result && keyType_ == other.keyType_; } result = result && (hasValue() == other.hasValue()); if (hasValue()) { @@ -1619,12 +1603,10 @@ public final class CellProtos { result = result && getTags() .equals(other.getTags()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -1646,11 +1628,12 @@ public final class CellProtos { } if (hasTimestamp()) { hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getTimestamp()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getTimestamp()); } if (hasKeyType()) { hash = (37 * hash) + KEY_TYPE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getKeyType()); + hash = (53 * hash) + keyType_; } if (hasValue()) { hash = (37 * hash) + VALUE_FIELD_NUMBER; @@ -1660,7 +1643,7 @@ public final class CellProtos { hash = (37 * hash) + TAGS_FIELD_NUMBER; hash = (53 * hash) + getTags().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -1688,67 +1671,79 @@ public final class CellProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.KeyValue parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.KeyValue parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.KeyValue parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.KeyValue parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.KeyValue parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.KeyValue parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.KeyValue prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + 
} + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.KeyValue} - * *
      **
      * Protocol buffer version of KeyValue.
      * It doesn't have those transient parameters
      * 
+ * + * Protobuf type {@code hbase.pb.KeyValue} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.KeyValueOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.KeyValue) + org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.KeyValueOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.internal_static_hbase_pb_KeyValue_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.internal_static_hbase_pb_KeyValue_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -1761,18 +1756,15 @@ public final class CellProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); row_ = com.google.protobuf.ByteString.EMPTY; @@ -1783,7 +1775,7 @@ public final class CellProtos { bitField0_ = (bitField0_ & ~0x00000004); timestamp_ = 0L; bitField0_ = (bitField0_ & ~0x00000008); - keyType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType.MINIMUM; + keyType_ = 0; bitField0_ = (bitField0_ & ~0x00000010); value_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000020); @@ -1792,10 +1784,6 @@ public final 
class CellProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.internal_static_hbase_pb_KeyValue_descriptor; @@ -1850,6 +1838,32 @@ public final class CellProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.KeyValue) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.KeyValue)other); @@ -1882,21 +1896,19 @@ public final class CellProtos { if (other.hasTags()) { setTags(other.getTags()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasRow()) { - return false; } if (!hasFamily()) { - return false; } if (!hasQualifier()) { - return false; } return true; @@ -1911,7 +1923,7 @@ public final class CellProtos { parsedMessage = PARSER.parsePartialFrom(input, 
extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.KeyValue) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -1921,7 +1933,6 @@ public final class CellProtos { } private int bitField0_; - // required bytes row = 1; private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes row = 1; @@ -1957,7 +1968,6 @@ public final class CellProtos { return this; } - // required bytes family = 2; private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes family = 2; @@ -1993,7 +2003,6 @@ public final class CellProtos { return this; } - // required bytes qualifier = 3; private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes qualifier = 3; @@ -2029,7 +2038,6 @@ public final class CellProtos { return this; } - // optional uint64 timestamp = 4; private long timestamp_ ; /** * optional uint64 timestamp = 4; @@ -2062,8 +2070,7 @@ public final class CellProtos { return this; } - // optional .hbase.pb.CellType key_type = 5; - private org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType keyType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType.MINIMUM; + private int keyType_ = 0; /** * optional .hbase.pb.CellType key_type = 5; */ @@ -2074,7 +2081,8 @@ public final class CellProtos { * optional .hbase.pb.CellType key_type = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType getKeyType() { - return keyType_; + org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType result = org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType.valueOf(keyType_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType.MINIMUM : result; } /** * optional .hbase.pb.CellType key_type = 5; @@ -2084,7 +2092,7 @@ public final class CellProtos { throw new NullPointerException(); } bitField0_ |= 0x00000010; - keyType_ = value; + keyType_ = value.getNumber(); onChanged(); return this; } @@ -2093,12 +2101,11 @@ public final class CellProtos { */ public Builder clearKeyType() { bitField0_ = (bitField0_ & ~0x00000010); - keyType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellType.MINIMUM; + keyType_ = 0; onChanged(); return this; } - // optional bytes value = 6; private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes value = 6; @@ -2134,7 +2141,6 @@ public final class CellProtos { return this; } - // optional bytes tags = 7; private com.google.protobuf.ByteString tags_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes tags = 7; @@ -2169,34 +2175,71 @@ public final class CellProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.KeyValue) } + // @@protoc_insertion_point(class_scope:hbase.pb.KeyValue) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.KeyValue DEFAULT_INSTANCE; static { - defaultInstance = new KeyValue(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.KeyValue(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.KeyValue getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser 
+ PARSER = new com.google.protobuf.AbstractParser() { + public KeyValue parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new KeyValue(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.KeyValue getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.KeyValue) } - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_Cell_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_Cell_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_KeyValue_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_KeyValue_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { @@ -2215,29 +2258,29 @@ public final class CellProtos { "\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public 
com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_hbase_pb_Cell_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_hbase_pb_Cell_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_Cell_descriptor, - new java.lang.String[] { "Row", "Family", "Qualifier", "Timestamp", "CellType", "Value", "Tags", }); - internal_static_hbase_pb_KeyValue_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_hbase_pb_KeyValue_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_KeyValue_descriptor, - new java.lang.String[] { "Row", "Family", "Qualifier", "Timestamp", "KeyType", "Value", "Tags", }); - return null; - } - }; + new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); + internal_static_hbase_pb_Cell_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_hbase_pb_Cell_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_Cell_descriptor, + new java.lang.String[] { "Row", "Family", "Qualifier", "Timestamp", "CellType", "Value", "Tags", }); + internal_static_hbase_pb_KeyValue_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_hbase_pb_KeyValue_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_KeyValue_descriptor, + new java.lang.String[] { "Row", "Family", "Qualifier", "Timestamp", "KeyType", "Value", 
"Tags", }); } // @@protoc_insertion_point(outer_class_scope) diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClientProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClientProtos.java index a43b5a0..c4ec758 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClientProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClientProtos.java @@ -6,26 +6,32 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated; public final class ClientProtos { private ClientProtos() {} public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); } /** - * Protobuf enum {@code hbase.pb.Consistency} - * *
    **
    * Consistency defines the expected consistency level for an operation.
    * 
+ * + * Protobuf enum {@code hbase.pb.Consistency} */ public enum Consistency implements com.google.protobuf.ProtocolMessageEnum { /** * STRONG = 0; */ - STRONG(0, 0), + STRONG(0), /** * TIMELINE = 1; */ - TIMELINE(1, 1), + TIMELINE(1), ; /** @@ -38,9 +44,19 @@ public final class ClientProtos { public static final int TIMELINE_VALUE = 1; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static Consistency valueOf(int value) { + return forNumber(value); + } + + public static Consistency forNumber(int value) { switch (value) { case 0: return STRONG; case 1: return TIMELINE; @@ -52,17 +68,17 @@ public final class ClientProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + Consistency> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public Consistency findValueByNumber(int number) { - return Consistency.valueOf(number); + return Consistency.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -84,26 +100,24 @@ public final class ClientProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private Consistency(int index, int value) { - this.index = index; + private Consistency(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.Consistency) } - public interface AuthorizationsOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface AuthorizationsOrBuilder extends + // 
@@protoc_insertion_point(interface_extends:hbase.pb.Authorizations) + com.google.protobuf.MessageOrBuilder { - // repeated string label = 1; /** * repeated string label = 1; */ java.util.List - getLabelList(); + getLabelList(); /** * repeated string label = 1; */ @@ -119,43 +133,35 @@ public final class ClientProtos { getLabelBytes(int index); } /** - * Protobuf type {@code hbase.pb.Authorizations} - * *
    **
    * The protocol buffer version of Authorizations.
    * 
+ * + * Protobuf type {@code hbase.pb.Authorizations} */ - public static final class Authorizations extends - com.google.protobuf.GeneratedMessage - implements AuthorizationsOrBuilder { + public static final class Authorizations extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.Authorizations) + AuthorizationsOrBuilder { // Use Authorizations.newBuilder() to construct. - private Authorizations(com.google.protobuf.GeneratedMessage.Builder builder) { + private Authorizations(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private Authorizations(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final Authorizations defaultInstance; - public static Authorizations getDefaultInstance() { - return defaultInstance; } - - public Authorizations getDefaultInstanceForType() { - return defaultInstance; + private Authorizations() { + label_ = com.google.protobuf.LazyStringArrayList.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private Authorizations( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -175,11 +181,12 @@ public final class ClientProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { label_ = new com.google.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000001; } - label_.add(input.readBytes()); + 
label_.add(bs); break; } } @@ -188,10 +195,10 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - label_ = new com.google.protobuf.UnmodifiableLazyStringList(label_); + label_ = label_.getUnmodifiableView(); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -202,35 +209,19 @@ public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Authorizations_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Authorizations_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public Authorizations parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new Authorizations(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - // repeated string label = 1; public static final int LABEL_FIELD_NUMBER = 1; private com.google.protobuf.LazyStringList label_; /** * repeated string label = 1; */ - public java.util.List + public com.google.protobuf.ProtocolStringList getLabelList() { return label_; } @@ 
-254,13 +245,11 @@ public final class ClientProtos { return label_.getByteString(index); } - private void initFields() { - label_ = com.google.protobuf.LazyStringArrayList.EMPTY; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -268,41 +257,32 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < label_.size(); i++) { - output.writeBytes(1, label_.getByteString(i)); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, label_.getRaw(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; { int dataSize = 0; for (int i = 0; i < label_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(label_.getByteString(i)); + dataSize += computeStringSizeNoTag(label_.getRaw(i)); } size += dataSize; size += 1 * getLabelList().size(); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -315,12 +295,10 @@ public final class ClientProtos { boolean result = true; result = result && getLabelList() .equals(other.getLabelList()); - result = result && - 
getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -332,7 +310,7 @@ public final class ClientProtos { hash = (37 * hash) + LABEL_FIELD_NUMBER; hash = (53 * hash) + getLabelList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -360,66 +338,78 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.Authorizations} - * *
      **
      * The protocol buffer version of Authorizations.
      * 
+ * + * Protobuf type {@code hbase.pb.Authorizations} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.AuthorizationsOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.Authorizations) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.AuthorizationsOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Authorizations_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Authorizations_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -432,18 +422,15 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); label_ = com.google.protobuf.LazyStringArrayList.EMPTY; @@ -451,10 +438,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Authorizations_descriptor; @@ -476,8 +459,7 @@ public 
final class ClientProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations(this); int from_bitField0_ = bitField0_; if (((bitField0_ & 0x00000001) == 0x00000001)) { - label_ = new com.google.protobuf.UnmodifiableLazyStringList( - label_); + label_ = label_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000001); } result.label_ = label_; @@ -485,6 +467,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations)other); @@ -506,7 +514,8 @@ public final class ClientProtos { } onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -523,7 +532,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -533,7 +542,6 @@ public final class ClientProtos { } private int bitField0_; - // repeated string label = 1; private com.google.protobuf.LazyStringList label_ = com.google.protobuf.LazyStringArrayList.EMPTY; private void ensureLabelIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { @@ -544,9 +552,9 @@ public final class ClientProtos { /** * repeated string label = 1; */ - public java.util.List + public com.google.protobuf.ProtocolStringList getLabelList() { - return java.util.Collections.unmodifiableList(label_); + return label_.getUnmodifiableView(); } /** * repeated string label = 1; @@ -599,7 +607,8 @@ public final class ClientProtos { public Builder addAllLabel( java.lang.Iterable values) { ensureLabelIsMutable(); - super.addAll(values, label_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, label_); onChanged(); return this; } @@ -625,22 +634,59 @@ public final class ClientProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.Authorizations) } + // @@protoc_insertion_point(class_scope:hbase.pb.Authorizations) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations DEFAULT_INSTANCE; static { - defaultInstance = new Authorizations(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public Authorizations parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Authorizations(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.Authorizations) } - public interface CellVisibilityOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface CellVisibilityOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.CellVisibility) + com.google.protobuf.MessageOrBuilder { - // required string expression = 1; /** * required string expression = 1; */ @@ -656,43 +702,35 @@ public final class ClientProtos { getExpressionBytes(); } /** - * Protobuf type {@code hbase.pb.CellVisibility} - * *
    **
    * The protocol buffer version of CellVisibility.
    * 
+ * + * Protobuf type {@code hbase.pb.CellVisibility} */ - public static final class CellVisibility extends - com.google.protobuf.GeneratedMessage - implements CellVisibilityOrBuilder { + public static final class CellVisibility extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.CellVisibility) + CellVisibilityOrBuilder { // Use CellVisibility.newBuilder() to construct. - private CellVisibility(com.google.protobuf.GeneratedMessage.Builder builder) { + private CellVisibility(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private CellVisibility(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final CellVisibility defaultInstance; - public static CellVisibility getDefaultInstance() { - return defaultInstance; } - - public CellVisibility getDefaultInstanceForType() { - return defaultInstance; + private CellVisibility() { + expression_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private CellVisibility( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -712,8 +750,9 @@ public final class ClientProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - expression_ = input.readBytes(); + expression_ = bs; break; } } @@ -722,7 +761,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -733,32 +772,16 @@ public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CellVisibility_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CellVisibility_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CellVisibility parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CellVisibility(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string expression = 1; public static final int EXPRESSION_FIELD_NUMBER = 1; - private java.lang.Object expression_; + private volatile java.lang.Object expression_; /** * required string expression = 1; */ @@ -799,13 +822,11 @@ public final class ClientProtos { } } - private void initFields() { - expression_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return 
false; if (!hasExpression()) { memoizedIsInitialized = 0; @@ -817,36 +838,27 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getExpressionBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, expression_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getExpressionBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, expression_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -862,12 +874,10 @@ public final class ClientProtos { result = result && getExpression() .equals(other.getExpression()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -879,7 +889,7 @@ public final class ClientProtos { hash = (37 * hash) + EXPRESSION_FIELD_NUMBER; hash = (53 * hash) + getExpression().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); 
memoizedHashCode = hash; return hash; } @@ -907,66 +917,78 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.CellVisibility} - * *
      **
      * The protocol buffer version of CellVisibility.
      * 
+ * + * Protobuf type {@code hbase.pb.CellVisibility} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibilityOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.CellVisibility) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibilityOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CellVisibility_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CellVisibility_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -979,18 +1001,15 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); expression_ = ""; @@ -998,10 +1017,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CellVisibility_descriptor; @@ -1032,6 +1047,32 @@ public final class ClientProtos { return 
result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility)other); @@ -1048,13 +1089,13 @@ public final class ClientProtos { expression_ = other.expression_; onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasExpression()) { - return false; } return true; @@ -1069,7 +1110,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -1079,7 +1120,6 @@ public final class ClientProtos { } private int bitField0_; - // required string expression = 1; 
private java.lang.Object expression_ = ""; /** * required string expression = 1; @@ -1093,9 +1133,12 @@ public final class ClientProtos { public java.lang.String getExpression() { java.lang.Object ref = expression_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - expression_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + expression_ = s; + } return s; } else { return (java.lang.String) ref; @@ -1152,22 +1195,59 @@ public final class ClientProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.CellVisibility) } + // @@protoc_insertion_point(class_scope:hbase.pb.CellVisibility) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility DEFAULT_INSTANCE; static { - defaultInstance = new CellVisibility(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public CellVisibility parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CellVisibility(input, extensionRegistry); + } + }; + + public 
static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.CellVisibility) } - public interface ColumnOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ColumnOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.Column) + com.google.protobuf.MessageOrBuilder { - // required bytes family = 1; /** * required bytes family = 1; */ @@ -1177,7 +1257,6 @@ public final class ClientProtos { */ com.google.protobuf.ByteString getFamily(); - // repeated bytes qualifier = 2; /** * repeated bytes qualifier = 2; */ @@ -1192,43 +1271,36 @@ public final class ClientProtos { com.google.protobuf.ByteString getQualifier(int index); } /** - * Protobuf type {@code hbase.pb.Column} - * *
    **
    * Container for a list of column qualifier names of a family.
    * 
+ * + * Protobuf type {@code hbase.pb.Column} */ - public static final class Column extends - com.google.protobuf.GeneratedMessage - implements ColumnOrBuilder { + public static final class Column extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.Column) + ColumnOrBuilder { // Use Column.newBuilder() to construct. - private Column(com.google.protobuf.GeneratedMessage.Builder builder) { + private Column(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private Column(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final Column defaultInstance; - public static Column getDefaultInstance() { - return defaultInstance; } - - public Column getDefaultInstanceForType() { - return defaultInstance; + private Column() { + family_ = com.google.protobuf.ByteString.EMPTY; + qualifier_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private Column( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -1266,7 +1338,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { qualifier_ = java.util.Collections.unmodifiableList(qualifier_); @@ 
-1280,30 +1352,14 @@ public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Column_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Column_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public Column parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new Column(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bytes family = 1; public static final int FAMILY_FIELD_NUMBER = 1; private com.google.protobuf.ByteString family_; /** @@ -1319,7 +1375,6 @@ public final class ClientProtos { return family_; } - // repeated bytes qualifier = 2; public static final int QUALIFIER_FIELD_NUMBER = 2; private java.util.List qualifier_; /** @@ -1342,14 +1397,11 @@ public final class ClientProtos { return qualifier_.get(index); } - private void initFields() { - family_ = com.google.protobuf.ByteString.EMPTY; - qualifier_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasFamily()) { 
memoizedIsInitialized = 0; @@ -1361,19 +1413,17 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, family_); } for (int i = 0; i < qualifier_.size(); i++) { output.writeBytes(2, qualifier_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -1390,19 +1440,13 @@ public final class ClientProtos { size += dataSize; size += 1 * getQualifierList().size(); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -1420,12 +1464,10 @@ public final class ClientProtos { } result = result && getQualifierList() .equals(other.getQualifierList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -1441,7 +1483,7 @@ public final class ClientProtos { hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; hash = (53 * hash) + getQualifierList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -1469,66 +1511,78 @@ public final class ClientProtos { } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, 
extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.Column} - * *
      **
      * Container for a list of column qualifier names of a family.
      * 
+ * + * Protobuf type {@code hbase.pb.Column} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ColumnOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.Column) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ColumnOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Column_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Column_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -1541,18 +1595,15 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); family_ = com.google.protobuf.ByteString.EMPTY; @@ -1562,10 +1613,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Column_descriptor; @@ -1601,6 +1648,32 @@ public final class ClientProtos { return result; } + public 
Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column)other); @@ -1625,13 +1698,13 @@ public final class ClientProtos { } onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasFamily()) { - return false; } return true; @@ -1646,7 +1719,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -1656,7 +1729,6 @@ public final class ClientProtos { } private int bitField0_; - // required bytes family = 1; private com.google.protobuf.ByteString family_ = 
com.google.protobuf.ByteString.EMPTY; /** * required bytes family = 1; @@ -1692,7 +1764,6 @@ public final class ClientProtos { return this; } - // repeated bytes qualifier = 2; private java.util.List qualifier_ = java.util.Collections.emptyList(); private void ensureQualifierIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { @@ -1750,7 +1821,8 @@ public final class ClientProtos { public Builder addAllQualifier( java.lang.Iterable values) { ensureQualifierIsMutable(); - super.addAll(values, qualifier_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, qualifier_); onChanged(); return this; } @@ -1763,22 +1835,59 @@ public final class ClientProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.Column) } + // @@protoc_insertion_point(class_scope:hbase.pb.Column) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column DEFAULT_INSTANCE; static { - defaultInstance = new Column(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public Column parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Column(input, extensionRegistry); + } + }; + + public static 
com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.Column) } - public interface GetOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface GetOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.Get) + com.google.protobuf.MessageOrBuilder { - // required bytes row = 1; /** * required bytes row = 1; */ @@ -1788,7 +1897,6 @@ public final class ClientProtos { */ com.google.protobuf.ByteString getRow(); - // repeated .hbase.pb.Column column = 2; /** * repeated .hbase.pb.Column column = 2; */ @@ -1813,7 +1921,6 @@ public final class ClientProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( int index); - // repeated .hbase.pb.NameBytesPair attribute = 3; /** * repeated .hbase.pb.NameBytesPair attribute = 3; */ @@ -1838,7 +1945,6 @@ public final class ClientProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( int index); - // optional .hbase.pb.Filter filter = 4; /** * optional .hbase.pb.Filter filter = 4; */ @@ -1852,7 +1958,6 @@ public final class ClientProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder(); - // optional .hbase.pb.TimeRange time_range = 5; /** * optional .hbase.pb.TimeRange time_range = 5; */ @@ -1866,7 +1971,6 @@ public final class ClientProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder(); - // optional uint32 max_versions = 6 [default = 1]; /** * optional uint32 max_versions = 6 [default = 1]; */ @@ -1876,7 +1980,6 @@ public final class ClientProtos { */ int 
getMaxVersions(); - // optional bool cache_blocks = 7 [default = true]; /** * optional bool cache_blocks = 7 [default = true]; */ @@ -1886,7 +1989,6 @@ public final class ClientProtos { */ boolean getCacheBlocks(); - // optional uint32 store_limit = 8; /** * optional uint32 store_limit = 8; */ @@ -1896,7 +1998,6 @@ public final class ClientProtos { */ int getStoreLimit(); - // optional uint32 store_offset = 9; /** * optional uint32 store_offset = 9; */ @@ -1906,27 +2007,25 @@ public final class ClientProtos { */ int getStoreOffset(); - // optional bool existence_only = 10 [default = false]; /** - * optional bool existence_only = 10 [default = false]; - * *
      * The result isn't asked for, just check for
      * the existence.
      * 
+ * + * optional bool existence_only = 10 [default = false]; */ boolean hasExistenceOnly(); /** - * optional bool existence_only = 10 [default = false]; - * *
      * The result isn't asked for, just check for
      * the existence.
      * 
+ * + * optional bool existence_only = 10 [default = false]; */ boolean getExistenceOnly(); - // optional .hbase.pb.Consistency consistency = 12 [default = STRONG]; /** * optional .hbase.pb.Consistency consistency = 12 [default = STRONG]; */ @@ -1936,7 +2035,6 @@ public final class ClientProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency getConsistency(); - // repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13; /** * repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13; */ @@ -1962,45 +2060,46 @@ public final class ClientProtos { int index); } /** - * Protobuf type {@code hbase.pb.Get} - * *
    **
    * The protocol buffer version of Get.
    * Unless existence_only is specified, return all the requested data
    * for the row that matches exactly.
    * 
+ * + * Protobuf type {@code hbase.pb.Get} */ - public static final class Get extends - com.google.protobuf.GeneratedMessage - implements GetOrBuilder { + public static final class Get extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.Get) + GetOrBuilder { // Use Get.newBuilder() to construct. - private Get(com.google.protobuf.GeneratedMessage.Builder builder) { + private Get(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private Get(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final Get defaultInstance; - public static Get getDefaultInstance() { - return defaultInstance; - } - - public Get getDefaultInstanceForType() { - return defaultInstance; + private Get() { + row_ = com.google.protobuf.ByteString.EMPTY; + column_ = java.util.Collections.emptyList(); + attribute_ = java.util.Collections.emptyList(); + maxVersions_ = 1; + cacheBlocks_ = true; + storeLimit_ = 0; + storeOffset_ = 0; + existenceOnly_ = false; + consistency_ = 0; + cfTimeRange_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private Get( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -2029,7 +2128,8 @@ public final class ClientProtos { column_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } - 
column_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.PARSER, extensionRegistry)); + column_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.PARSER, extensionRegistry)); break; } case 26: { @@ -2037,7 +2137,8 @@ public final class ClientProtos { attribute_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000004; } - attribute_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry)); + attribute_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry)); break; } case 34: { @@ -2098,7 +2199,7 @@ public final class ClientProtos { unknownFields.mergeVarintField(12, rawValue); } else { bitField0_ |= 0x00000100; - consistency_ = value; + consistency_ = rawValue; } break; } @@ -2107,7 +2208,8 @@ public final class ClientProtos { cfTimeRange_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000800; } - cfTimeRange_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.PARSER, extensionRegistry)); + cfTimeRange_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.PARSER, extensionRegistry)); break; } } @@ -2116,7 +2218,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { column_ = java.util.Collections.unmodifiableList(column_); @@ -2136,30 +2238,14 @@ public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Get_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable 
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Get_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public Get parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new Get(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bytes row = 1; public static final int ROW_FIELD_NUMBER = 1; private com.google.protobuf.ByteString row_; /** @@ -2175,7 +2261,6 @@ public final class ClientProtos { return row_; } - // repeated .hbase.pb.Column column = 2; public static final int COLUMN_FIELD_NUMBER = 2; private java.util.List column_; /** @@ -2211,7 +2296,6 @@ public final class ClientProtos { return column_.get(index); } - // repeated .hbase.pb.NameBytesPair attribute = 3; public static final int ATTRIBUTE_FIELD_NUMBER = 3; private java.util.List attribute_; /** @@ -2247,7 +2331,6 @@ public final class ClientProtos { return attribute_.get(index); } - // optional .hbase.pb.Filter filter = 4; public static final int FILTER_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter filter_; /** @@ -2260,16 +2343,15 @@ public final class ClientProtos { * optional .hbase.pb.Filter filter = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter getFilter() { - return filter_; + return filter_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance() : filter_; } /** * optional .hbase.pb.Filter filter = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() { - return filter_; + return filter_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance() : filter_; } - // optional .hbase.pb.TimeRange time_range = 5; public static final int TIME_RANGE_FIELD_NUMBER = 5; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange timeRange_; /** @@ -2282,16 +2364,15 @@ public final class ClientProtos { * optional .hbase.pb.TimeRange time_range = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { - return timeRange_; + return timeRange_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } /** * optional .hbase.pb.TimeRange time_range = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { - return timeRange_; + return timeRange_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } - // optional uint32 max_versions = 6 [default = 1]; public static final int MAX_VERSIONS_FIELD_NUMBER = 6; private int maxVersions_; /** @@ -2307,7 +2388,6 @@ public final class ClientProtos { return maxVersions_; } - // optional bool cache_blocks = 7 [default = true]; public static final int CACHE_BLOCKS_FIELD_NUMBER = 7; private boolean cacheBlocks_; /** @@ -2323,7 +2403,6 @@ public final class ClientProtos { return cacheBlocks_; } - // optional uint32 store_limit = 8; public static final int STORE_LIMIT_FIELD_NUMBER = 8; private int storeLimit_; /** @@ -2339,7 +2418,6 @@ public final class ClientProtos { return storeLimit_; } - // optional uint32 store_offset = 9; public static final int STORE_OFFSET_FIELD_NUMBER = 9; private int storeOffset_; /** @@ -2355,35 +2433,33 @@ public final class ClientProtos { return storeOffset_; } - // optional bool existence_only = 10 [default = false]; public static final int EXISTENCE_ONLY_FIELD_NUMBER = 10; private boolean existenceOnly_; /** - * optional bool existence_only = 10 [default = false]; - * *
      * The result isn't asked for, just check for
      * the existence.
      * 
+ * + * optional bool existence_only = 10 [default = false]; */ public boolean hasExistenceOnly() { return ((bitField0_ & 0x00000080) == 0x00000080); } /** - * optional bool existence_only = 10 [default = false]; - * *
      * The result isn't asked for, just check for
      * the existence.
      * 
+ * + * optional bool existence_only = 10 [default = false]; */ public boolean getExistenceOnly() { return existenceOnly_; } - // optional .hbase.pb.Consistency consistency = 12 [default = STRONG]; public static final int CONSISTENCY_FIELD_NUMBER = 12; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency consistency_; + private int consistency_; /** * optional .hbase.pb.Consistency consistency = 12 [default = STRONG]; */ @@ -2394,10 +2470,10 @@ public final class ClientProtos { * optional .hbase.pb.Consistency consistency = 12 [default = STRONG]; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency getConsistency() { - return consistency_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency result = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency.valueOf(consistency_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency.STRONG : result; } - // repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13; public static final int CF_TIME_RANGE_FIELD_NUMBER = 13; private java.util.List cfTimeRange_; /** @@ -2433,24 +2509,11 @@ public final class ClientProtos { return cfTimeRange_.get(index); } - private void initFields() { - row_ = com.google.protobuf.ByteString.EMPTY; - column_ = java.util.Collections.emptyList(); - attribute_ = java.util.Collections.emptyList(); - filter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); - timeRange_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - maxVersions_ = 1; - cacheBlocks_ = true; - storeLimit_ = 0; - storeOffset_ = 0; - existenceOnly_ = false; - consistency_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency.STRONG; - cfTimeRange_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean 
isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRow()) { memoizedIsInitialized = 0; @@ -2486,7 +2549,6 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, row_); } @@ -2497,10 +2559,10 @@ public final class ClientProtos { output.writeMessage(3, attribute_.get(i)); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(4, filter_); + output.writeMessage(4, getFilter()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeMessage(5, timeRange_); + output.writeMessage(5, getTimeRange()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeUInt32(6, maxVersions_); @@ -2518,17 +2580,16 @@ public final class ClientProtos { output.writeBool(10, existenceOnly_); } if (((bitField0_ & 0x00000100) == 0x00000100)) { - output.writeEnum(12, consistency_.getNumber()); + output.writeEnum(12, consistency_); } for (int i = 0; i < cfTimeRange_.size(); i++) { output.writeMessage(13, cfTimeRange_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -2546,11 +2607,11 @@ public final class ClientProtos { } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(4, filter_); + .computeMessageSize(4, getFilter()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(5, timeRange_); + .computeMessageSize(5, getTimeRange()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += 
com.google.protobuf.CodedOutputStream @@ -2574,25 +2635,19 @@ public final class ClientProtos { } if (((bitField0_ & 0x00000100) == 0x00000100)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(12, consistency_.getNumber()); + .computeEnumSize(12, consistency_); } for (int i = 0; i < cfTimeRange_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(13, cfTimeRange_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -2649,17 +2704,14 @@ public final class ClientProtos { } result = result && (hasConsistency() == other.hasConsistency()); if (hasConsistency()) { - result = result && - (getConsistency() == other.getConsistency()); + result = result && consistency_ == other.consistency_; } result = result && getCfTimeRangeList() .equals(other.getCfTimeRangeList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -2693,7 +2745,8 @@ public final class ClientProtos { } if (hasCacheBlocks()) { hash = (37 * hash) + CACHE_BLOCKS_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getCacheBlocks()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getCacheBlocks()); } if (hasStoreLimit()) { hash = (37 * hash) + STORE_LIMIT_FIELD_NUMBER; @@ -2705,17 +2758,18 @@ public final class ClientProtos { } if (hasExistenceOnly()) { hash = (37 * hash) + EXISTENCE_ONLY_FIELD_NUMBER; - hash = (53 
* hash) + hashBoolean(getExistenceOnly()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getExistenceOnly()); } if (hasConsistency()) { hash = (37 * hash) + CONSISTENCY_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getConsistency()); + hash = (53 * hash) + consistency_; } if (getCfTimeRangeCount() > 0) { hash = (37 * hash) + CF_TIME_RANGE_FIELD_NUMBER; hash = (53 * hash) + getCfTimeRangeList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -2743,68 +2797,80 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.Get} - * *
      **
      * The protocol buffer version of Get.
      * Unless existence_only is specified, return all the requested data
      * for the row that matches exactly.
      * 
+ * + * Protobuf type {@code hbase.pb.Get} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.Get) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Get_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Get_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -2817,12 +2883,13 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getColumnFieldBuilder(); getAttributeFieldBuilder(); getFilterFieldBuilder(); @@ -2830,10 +2897,6 @@ public final class ClientProtos { getCfTimeRangeFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); row_ = com.google.protobuf.ByteString.EMPTY; @@ -2851,13 +2914,13 @@ public final class ClientProtos { attributeBuilder_.clear(); } if (filterBuilder_ == null) { - filter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); + filter_ = null; } else { filterBuilder_.clear(); } 
bitField0_ = (bitField0_ & ~0x00000008); if (timeRangeBuilder_ == null) { - timeRange_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + timeRange_ = null; } else { timeRangeBuilder_.clear(); } @@ -2872,7 +2935,7 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000100); existenceOnly_ = false; bitField0_ = (bitField0_ & ~0x00000200); - consistency_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency.STRONG; + consistency_ = 0; bitField0_ = (bitField0_ & ~0x00000400); if (cfTimeRangeBuilder_ == null) { cfTimeRange_ = java.util.Collections.emptyList(); @@ -2883,10 +2946,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Get_descriptor; @@ -2984,6 +3043,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get)other); @@ -3017,7 +3102,7 @@ public final class ClientProtos { column_ = other.column_; bitField0_ = (bitField0_ & ~0x00000002); columnBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getColumnFieldBuilder() : null; } else { columnBuilder_.addAllMessages(other.column_); @@ -3043,7 +3128,7 @@ public final class ClientProtos { attribute_ = other.attribute_; bitField0_ = (bitField0_ & ~0x00000004); attributeBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getAttributeFieldBuilder() : null; } else { attributeBuilder_.addAllMessages(other.attribute_); @@ -3093,43 +3178,39 @@ public final class ClientProtos { cfTimeRange_ = other.cfTimeRange_; bitField0_ = (bitField0_ & ~0x00000800); cfTimeRangeBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getCfTimeRangeFieldBuilder() : null; } else { cfTimeRangeBuilder_.addAllMessages(other.cfTimeRange_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasRow()) { - return false; } for (int i = 0; i < getColumnCount(); i++) { if (!getColumn(i).isInitialized()) { - return false; } } for (int i = 0; i < getAttributeCount(); i++) { if (!getAttribute(i).isInitialized()) { - return false; } } if (hasFilter()) { if (!getFilter().isInitialized()) { - return false; } } for (int i = 0; i < getCfTimeRangeCount(); i++) { if (!getCfTimeRange(i).isInitialized()) { - return false; } } @@ -3145,7 +3226,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -3155,7 +3236,6 @@ public final class ClientProtos { } private int bitField0_; - // required bytes row = 1; private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes row = 1; @@ -3191,7 +3271,6 @@ public final class ClientProtos { return this; } - // repeated .hbase.pb.Column column = 2; private java.util.List column_ = java.util.Collections.emptyList(); private void ensureColumnIsMutable() { @@ -3201,7 +3280,7 @@ public final class ClientProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ColumnOrBuilder> columnBuilder_; /** @@ -3333,7 +3412,8 
@@ public final class ClientProtos { java.lang.Iterable values) { if (columnBuilder_ == null) { ensureColumnIsMutable(); - super.addAll(values, column_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, column_); onChanged(); } else { columnBuilder_.addAllMessages(values); @@ -3416,11 +3496,11 @@ public final class ClientProtos { getColumnBuilderList() { return getColumnFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ColumnOrBuilder> getColumnFieldBuilder() { if (columnBuilder_ == null) { - columnBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + columnBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ColumnOrBuilder>( column_, ((bitField0_ & 0x00000002) == 0x00000002), @@ -3431,7 +3511,6 @@ public final class ClientProtos { return columnBuilder_; } - // repeated .hbase.pb.NameBytesPair attribute = 3; private java.util.List attribute_ = java.util.Collections.emptyList(); private void ensureAttributeIsMutable() { @@ -3441,7 +3520,7 @@ public final class ClientProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_; /** @@ -3573,7 +3652,8 @@ public final class ClientProtos { java.lang.Iterable 
values) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); - super.addAll(values, attribute_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, attribute_); onChanged(); } else { attributeBuilder_.addAllMessages(values); @@ -3656,11 +3736,11 @@ public final class ClientProtos { getAttributeBuilderList() { return getAttributeFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getAttributeFieldBuilder() { if (attributeBuilder_ == null) { - attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( attribute_, ((bitField0_ & 0x00000004) == 0x00000004), @@ -3671,9 +3751,8 @@ public final class ClientProtos { return attributeBuilder_; } - // optional .hbase.pb.Filter filter = 4; - private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter filter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter filter_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder> 
filterBuilder_; /** * optional .hbase.pb.Filter filter = 4; @@ -3686,7 +3765,7 @@ public final class ClientProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter getFilter() { if (filterBuilder_ == null) { - return filter_; + return filter_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance() : filter_; } else { return filterBuilder_.getMessage(); } @@ -3727,6 +3806,7 @@ public final class ClientProtos { public Builder mergeFilter(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter value) { if (filterBuilder_ == null) { if (((bitField0_ & 0x00000008) == 0x00000008) && + filter_ != null && filter_ != org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) { filter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial(); @@ -3745,7 +3825,7 @@ public final class ClientProtos { */ public Builder clearFilter() { if (filterBuilder_ == null) { - filter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); + filter_ = null; onChanged(); } else { filterBuilder_.clear(); @@ -3768,19 +3848,20 @@ public final class ClientProtos { if (filterBuilder_ != null) { return filterBuilder_.getMessageOrBuilder(); } else { - return filter_; + return filter_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance() : filter_; } } /** * optional .hbase.pb.Filter filter = 4; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder> getFilterFieldBuilder() { if (filterBuilder_ == null) { - filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + filterBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder>( - filter_, + getFilter(), getParentForChildren(), isClean()); filter_ = null; @@ -3788,9 +3869,8 @@ public final class ClientProtos { return filterBuilder_; } - // optional .hbase.pb.TimeRange time_range = 5; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange timeRange_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; /** * optional .hbase.pb.TimeRange time_range = 5; @@ -3803,7 +3883,7 @@ public final class ClientProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { if (timeRangeBuilder_ == null) { - 
return timeRange_; + return timeRange_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } else { return timeRangeBuilder_.getMessage(); } @@ -3844,6 +3924,7 @@ public final class ClientProtos { public Builder mergeTimeRange(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange value) { if (timeRangeBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010) && + timeRange_ != null && timeRange_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) { timeRange_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial(); @@ -3862,7 +3943,7 @@ public final class ClientProtos { */ public Builder clearTimeRange() { if (timeRangeBuilder_ == null) { - timeRange_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + timeRange_ = null; onChanged(); } else { timeRangeBuilder_.clear(); @@ -3885,19 +3966,20 @@ public final class ClientProtos { if (timeRangeBuilder_ != null) { return timeRangeBuilder_.getMessageOrBuilder(); } else { - return timeRange_; + return timeRange_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } } /** * optional .hbase.pb.TimeRange time_range = 5; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> getTimeRangeFieldBuilder() { if (timeRangeBuilder_ == null) { - timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder< + timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>( - timeRange_, + getTimeRange(), getParentForChildren(), isClean()); timeRange_ = null; @@ -3905,7 +3987,6 @@ public final class ClientProtos { return timeRangeBuilder_; } - // optional uint32 max_versions = 6 [default = 1]; private int maxVersions_ = 1; /** * optional uint32 max_versions = 6 [default = 1]; @@ -3938,7 +4019,6 @@ public final class ClientProtos { return this; } - // optional bool cache_blocks = 7 [default = true]; private boolean cacheBlocks_ = true; /** * optional bool cache_blocks = 7 [default = true]; @@ -3971,7 +4051,6 @@ public final class ClientProtos { return this; } - // optional uint32 store_limit = 8; private int storeLimit_ ; /** * optional uint32 store_limit = 8; @@ -4004,7 +4083,6 @@ public final class ClientProtos { return this; } - // optional uint32 store_offset = 9; private int storeOffset_ ; /** * optional uint32 store_offset = 9; @@ -4037,37 +4115,36 @@ public final class ClientProtos { return this; } - // optional bool existence_only = 10 [default = false]; private boolean existenceOnly_ ; /** - * optional bool 
existence_only = 10 [default = false]; - * *
        * The result isn't asked for, just check for
        * the existence.
        * 
+ * + * optional bool existence_only = 10 [default = false]; */ public boolean hasExistenceOnly() { return ((bitField0_ & 0x00000200) == 0x00000200); } /** - * optional bool existence_only = 10 [default = false]; - * *
        * The result isn't asked for, just check for
        * the existence.
        * 
+ * + * optional bool existence_only = 10 [default = false]; */ public boolean getExistenceOnly() { return existenceOnly_; } /** - * optional bool existence_only = 10 [default = false]; - * *
        * The result isn't asked for, just check for
        * the existence.
        * 
+ * + * optional bool existence_only = 10 [default = false]; */ public Builder setExistenceOnly(boolean value) { bitField0_ |= 0x00000200; @@ -4076,12 +4153,12 @@ public final class ClientProtos { return this; } /** - * optional bool existence_only = 10 [default = false]; - * *
        * The result isn't asked for, just check for
        * the existence.
        * 
+ * + * optional bool existence_only = 10 [default = false]; */ public Builder clearExistenceOnly() { bitField0_ = (bitField0_ & ~0x00000200); @@ -4090,8 +4167,7 @@ public final class ClientProtos { return this; } - // optional .hbase.pb.Consistency consistency = 12 [default = STRONG]; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency consistency_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency.STRONG; + private int consistency_ = 0; /** * optional .hbase.pb.Consistency consistency = 12 [default = STRONG]; */ @@ -4102,7 +4178,8 @@ public final class ClientProtos { * optional .hbase.pb.Consistency consistency = 12 [default = STRONG]; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency getConsistency() { - return consistency_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency result = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency.valueOf(consistency_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency.STRONG : result; } /** * optional .hbase.pb.Consistency consistency = 12 [default = STRONG]; @@ -4112,7 +4189,7 @@ public final class ClientProtos { throw new NullPointerException(); } bitField0_ |= 0x00000400; - consistency_ = value; + consistency_ = value.getNumber(); onChanged(); return this; } @@ -4121,12 +4198,11 @@ public final class ClientProtos { */ public Builder clearConsistency() { bitField0_ = (bitField0_ & ~0x00000400); - consistency_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency.STRONG; + consistency_ = 0; onChanged(); return this; } - // repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13; private java.util.List cfTimeRange_ = java.util.Collections.emptyList(); private void ensureCfTimeRangeIsMutable() { @@ -4136,7 +4212,7 @@ public final class ClientProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder> cfTimeRangeBuilder_; /** @@ -4268,7 +4344,8 @@ public final class ClientProtos { java.lang.Iterable values) { if (cfTimeRangeBuilder_ == null) { ensureCfTimeRangeIsMutable(); - super.addAll(values, cfTimeRange_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, cfTimeRange_); onChanged(); } else { cfTimeRangeBuilder_.addAllMessages(values); @@ -4351,11 +4428,11 @@ public final class ClientProtos { getCfTimeRangeBuilderList() { return getCfTimeRangeFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder> getCfTimeRangeFieldBuilder() { if (cfTimeRangeBuilder_ == null) { - cfTimeRangeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + cfTimeRangeBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder>( cfTimeRange_, ((bitField0_ & 0x00000800) == 0x00000800), @@ -4365,75 +4442,109 @@ public final class ClientProtos { } return cfTimeRangeBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.Get) } + // @@protoc_insertion_point(class_scope:hbase.pb.Get) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get DEFAULT_INSTANCE; static { - defaultInstance = new Get(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public Get parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + 
return new Get(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.Get) } - public interface ResultOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ResultOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.Result) + com.google.protobuf.MessageOrBuilder { - // repeated .hbase.pb.Cell cell = 1; /** - * repeated .hbase.pb.Cell cell = 1; - * *
      * Result includes the Cells or else it just has a count of Cells
      * that are carried otherwise.
      * 
+ * + * repeated .hbase.pb.Cell cell = 1; */ java.util.List getCellList(); /** - * repeated .hbase.pb.Cell cell = 1; - * *
      * Result includes the Cells or else it just has a count of Cells
      * that are carried otherwise.
      * 
+ * + * repeated .hbase.pb.Cell cell = 1; */ org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell getCell(int index); /** - * repeated .hbase.pb.Cell cell = 1; - * *
      * Result includes the Cells or else it just has a count of Cells
      * that are carried otherwise.
      * 
+ * + * repeated .hbase.pb.Cell cell = 1; */ int getCellCount(); /** - * repeated .hbase.pb.Cell cell = 1; - * *
      * Result includes the Cells or else it just has a count of Cells
      * that are carried otherwise.
      * 
+ * + * repeated .hbase.pb.Cell cell = 1; */ java.util.List getCellOrBuilderList(); /** - * repeated .hbase.pb.Cell cell = 1; - * *
      * Result includes the Cells or else it just has a count of Cells
      * that are carried otherwise.
      * 
+ * + * repeated .hbase.pb.Cell cell = 1; */ org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellOrBuilder getCellOrBuilder( int index); - // optional int32 associated_cell_count = 2; /** - * optional int32 associated_cell_count = 2; - * *
      * The below count is set when the associated cells are
      * not part of this protobuf message; they are passed alongside
@@ -4442,11 +4553,11 @@ public final class ClientProtos {
      * ours.  NOTE: This is different from the pb managed cell_count of the
      * 'cell' field above which is non-null when the cells are pb'd.
      * 
+ * + * optional int32 associated_cell_count = 2; */ boolean hasAssociatedCellCount(); /** - * optional int32 associated_cell_count = 2; - * *
      * The below count is set when the associated cells are
      * not part of this protobuf message; they are passed alongside
@@ -4455,104 +4566,99 @@ public final class ClientProtos {
      * ours.  NOTE: This is different from the pb managed cell_count of the
      * 'cell' field above which is non-null when the cells are pb'd.
      * 
+ * + * optional int32 associated_cell_count = 2; */ int getAssociatedCellCount(); - // optional bool exists = 3; /** - * optional bool exists = 3; - * *
      * used for Get to check existence only. Not set if existence_only was not set to true
      *  in the query.
      * 
+ * + * optional bool exists = 3; */ boolean hasExists(); /** - * optional bool exists = 3; - * *
      * used for Get to check existence only. Not set if existence_only was not set to true
      *  in the query.
      * 
+ * + * optional bool exists = 3; */ boolean getExists(); - // optional bool stale = 4 [default = false]; /** - * optional bool stale = 4 [default = false]; - * *
      * Whether or not the results are coming from possibly stale data 
      * 
+ * + * optional bool stale = 4 [default = false]; */ boolean hasStale(); /** - * optional bool stale = 4 [default = false]; - * *
      * Whether or not the results are coming from possibly stale data 
      * 
+ * + * optional bool stale = 4 [default = false]; */ boolean getStale(); - // optional bool partial = 5 [default = false]; /** - * optional bool partial = 5 [default = false]; - * *
      * Whether or not the entire result could be returned. Results will be split when
      * the RPC chunk size limit is reached. Partial results contain only a subset of the
      * cells for a row and must be combined with a result containing the remaining cells
      * to form a complete result
      * 
+ * + * optional bool partial = 5 [default = false]; */ boolean hasPartial(); /** - * optional bool partial = 5 [default = false]; - * *
      * Whether or not the entire result could be returned. Results will be split when
      * the RPC chunk size limit is reached. Partial results contain only a subset of the
      * cells for a row and must be combined with a result containing the remaining cells
      * to form a complete result
      * 
+ * + * optional bool partial = 5 [default = false]; */ boolean getPartial(); } /** * Protobuf type {@code hbase.pb.Result} */ - public static final class Result extends - com.google.protobuf.GeneratedMessage - implements ResultOrBuilder { + public static final class Result extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.Result) + ResultOrBuilder { // Use Result.newBuilder() to construct. - private Result(com.google.protobuf.GeneratedMessage.Builder builder) { + private Result(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private Result(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final Result defaultInstance; - public static Result getDefaultInstance() { - return defaultInstance; } - - public Result getDefaultInstanceForType() { - return defaultInstance; + private Result() { + cell_ = java.util.Collections.emptyList(); + associatedCellCount_ = 0; + exists_ = false; + stale_ = false; + partial_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private Result( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -4576,7 +4682,8 @@ public final class ClientProtos { cell_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } - cell_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell.PARSER, extensionRegistry)); + cell_.add( + 
input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell.PARSER, extensionRegistry)); break; } case 16: { @@ -4605,7 +4712,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { cell_ = java.util.Collections.unmodifiableList(cell_); @@ -4619,96 +4726,77 @@ public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Result_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Result_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public Result parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new Result(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // repeated .hbase.pb.Cell cell = 1; public static final int CELL_FIELD_NUMBER = 1; private java.util.List cell_; /** - * repeated .hbase.pb.Cell cell = 1; - * *
      * Result includes the Cells or else it just has a count of Cells
      * that are carried otherwise.
      * 
+ * + * repeated .hbase.pb.Cell cell = 1; */ public java.util.List getCellList() { return cell_; } /** - * repeated .hbase.pb.Cell cell = 1; - * *
      * Result includes the Cells or else it just has a count of Cells
      * that are carried otherwise.
      * 
+ * + * repeated .hbase.pb.Cell cell = 1; */ public java.util.List getCellOrBuilderList() { return cell_; } /** - * repeated .hbase.pb.Cell cell = 1; - * *
      * Result includes the Cells or else it just has a count of Cells
      * that are carried otherwise.
      * 
+ * + * repeated .hbase.pb.Cell cell = 1; */ public int getCellCount() { return cell_.size(); } /** - * repeated .hbase.pb.Cell cell = 1; - * *
      * Result includes the Cells or else it just has a count of Cells
      * that are carried otherwise.
      * 
+ * + * repeated .hbase.pb.Cell cell = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell getCell(int index) { return cell_.get(index); } /** - * repeated .hbase.pb.Cell cell = 1; - * *
      * Result includes the Cells or else it just has a count of Cells
      * that are carried otherwise.
      * 
+ * + * repeated .hbase.pb.Cell cell = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellOrBuilder getCellOrBuilder( int index) { return cell_.get(index); } - // optional int32 associated_cell_count = 2; public static final int ASSOCIATED_CELL_COUNT_FIELD_NUMBER = 2; private int associatedCellCount_; /** - * optional int32 associated_cell_count = 2; - * *
      * The below count is set when the associated cells are
      * not part of this protobuf message; they are passed alongside
@@ -4717,13 +4805,13 @@ public final class ClientProtos {
      * ours.  NOTE: This is different from the pb managed cell_count of the
      * 'cell' field above which is non-null when the cells are pb'd.
      * 
+ * + * optional int32 associated_cell_count = 2; */ public boolean hasAssociatedCellCount() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional int32 associated_cell_count = 2; - * *
      * The below count is set when the associated cells are
      * not part of this protobuf message; they are passed alongside
@@ -4732,102 +4820,95 @@ public final class ClientProtos {
      * ours.  NOTE: This is different from the pb managed cell_count of the
      * 'cell' field above which is non-null when the cells are pb'd.
      * 
+ * + * optional int32 associated_cell_count = 2; */ public int getAssociatedCellCount() { return associatedCellCount_; } - // optional bool exists = 3; public static final int EXISTS_FIELD_NUMBER = 3; private boolean exists_; /** - * optional bool exists = 3; - * *
      * used for Get to check existence only. Not set if existence_only was not set to true
      *  in the query.
      * 
+ * + * optional bool exists = 3; */ public boolean hasExists() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional bool exists = 3; - * *
      * used for Get to check existence only. Not set if existence_only was not set to true
      *  in the query.
      * 
+ * + * optional bool exists = 3; */ public boolean getExists() { return exists_; } - // optional bool stale = 4 [default = false]; public static final int STALE_FIELD_NUMBER = 4; private boolean stale_; /** - * optional bool stale = 4 [default = false]; - * *
      * Whether or not the results are coming from possibly stale data 
      * 
+ * + * optional bool stale = 4 [default = false]; */ public boolean hasStale() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * optional bool stale = 4 [default = false]; - * *
      * Whether or not the results are coming from possibly stale data 
      * 
+ * + * optional bool stale = 4 [default = false]; */ public boolean getStale() { return stale_; } - // optional bool partial = 5 [default = false]; public static final int PARTIAL_FIELD_NUMBER = 5; private boolean partial_; /** - * optional bool partial = 5 [default = false]; - * *
      * Whether or not the entire result could be returned. Results will be split when
      * the RPC chunk size limit is reached. Partial results contain only a subset of the
      * cells for a row and must be combined with a result containing the remaining cells
      * to form a complete result
      * 
+ * + * optional bool partial = 5 [default = false]; */ public boolean hasPartial() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** - * optional bool partial = 5 [default = false]; - * *
      * Whether or not the entire result could be returned. Results will be split when
      * the RPC chunk size limit is reached. Partial results contain only a subset of the
      * cells for a row and must be combined with a result containing the remaining cells
      * to form a complete result
      * 
+ * + * optional bool partial = 5 [default = false]; */ public boolean getPartial() { return partial_; } - private void initFields() { - cell_ = java.util.Collections.emptyList(); - associatedCellCount_ = 0; - exists_ = false; - stale_ = false; - partial_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -4835,7 +4916,6 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < cell_.size(); i++) { output.writeMessage(1, cell_.get(i)); } @@ -4851,12 +4931,11 @@ public final class ClientProtos { if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeBool(5, partial_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -4880,19 +4959,13 @@ public final class ClientProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(5, partial_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -4925,12 +4998,10 @@ public final class ClientProtos { result = result && (getPartial() == other.getPartial()); } - result = result && - 
getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -4948,17 +5019,20 @@ public final class ClientProtos { } if (hasExists()) { hash = (37 * hash) + EXISTS_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getExists()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getExists()); } if (hasStale()) { hash = (37 * hash) + STALE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getStale()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getStale()); } if (hasPartial()) { hash = (37 * hash) + PARTIAL_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getPartial()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getPartial()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -4986,46 +5060,57 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + 
.parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -5033,14 +5118,15 @@ public final class ClientProtos { * Protobuf type {@code hbase.pb.Result} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.Result) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Result_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Result_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -5053,19 +5139,16 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getCellFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (cellBuilder_ == null) { @@ -5085,10 +5168,6 @@ 
public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Result_descriptor; @@ -5140,6 +5219,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result)other); @@ -5170,7 +5275,7 @@ public final class ClientProtos { cell_ = other.cell_; bitField0_ = (bitField0_ & ~0x00000001); cellBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getCellFieldBuilder() : null; } else { cellBuilder_.addAllMessages(other.cell_); @@ -5189,7 +5294,8 @@ public final class ClientProtos { if (other.hasPartial()) { setPartial(other.getPartial()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -5206,7 +5312,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -5216,7 +5322,6 @@ public final class ClientProtos { } private int bitField0_; - // repeated .hbase.pb.Cell cell = 1; private java.util.List cell_ = java.util.Collections.emptyList(); private void ensureCellIsMutable() { @@ -5226,16 +5331,16 @@ public final class ClientProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell, org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellOrBuilder> cellBuilder_; /** - * repeated .hbase.pb.Cell cell = 1; - * *
        * Result includes the Cells or else it just has a count of Cells
        * that are carried otherwise.
        * 
+ * + * repeated .hbase.pb.Cell cell = 1; */ public java.util.List getCellList() { if (cellBuilder_ == null) { @@ -5245,12 +5350,12 @@ public final class ClientProtos { } } /** - * repeated .hbase.pb.Cell cell = 1; - * *
        * Result includes the Cells or else it just has a count of Cells
        * that are carried otherwise.
        * 
+ * + * repeated .hbase.pb.Cell cell = 1; */ public int getCellCount() { if (cellBuilder_ == null) { @@ -5260,12 +5365,12 @@ public final class ClientProtos { } } /** - * repeated .hbase.pb.Cell cell = 1; - * *
        * Result includes the Cells or else it just has a count of Cells
        * that are carried otherwise.
        * 
+ * + * repeated .hbase.pb.Cell cell = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell getCell(int index) { if (cellBuilder_ == null) { @@ -5275,12 +5380,12 @@ public final class ClientProtos { } } /** - * repeated .hbase.pb.Cell cell = 1; - * *
        * Result includes the Cells or else it just has a count of Cells
        * that are carried otherwise.
        * 
+ * + * repeated .hbase.pb.Cell cell = 1; */ public Builder setCell( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell value) { @@ -5297,12 +5402,12 @@ public final class ClientProtos { return this; } /** - * repeated .hbase.pb.Cell cell = 1; - * *
        * Result includes the Cells or else it just has a count of Cells
        * that are carried otherwise.
        * 
+ * + * repeated .hbase.pb.Cell cell = 1; */ public Builder setCell( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell.Builder builderForValue) { @@ -5316,12 +5421,12 @@ public final class ClientProtos { return this; } /** - * repeated .hbase.pb.Cell cell = 1; - * *
        * Result includes the Cells or else it just has a count of Cells
        * that are carried otherwise.
        * 
+ * + * repeated .hbase.pb.Cell cell = 1; */ public Builder addCell(org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell value) { if (cellBuilder_ == null) { @@ -5337,12 +5442,12 @@ public final class ClientProtos { return this; } /** - * repeated .hbase.pb.Cell cell = 1; - * *
        * Result includes the Cells or else it just has a count of Cells
        * that are carried otherwise.
        * 
+ * + * repeated .hbase.pb.Cell cell = 1; */ public Builder addCell( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell value) { @@ -5359,12 +5464,12 @@ public final class ClientProtos { return this; } /** - * repeated .hbase.pb.Cell cell = 1; - * *
        * Result includes the Cells or else it just has a count of Cells
        * that are carried otherwise.
        * 
+ * + * repeated .hbase.pb.Cell cell = 1; */ public Builder addCell( org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell.Builder builderForValue) { @@ -5378,12 +5483,12 @@ public final class ClientProtos { return this; } /** - * repeated .hbase.pb.Cell cell = 1; - * *
        * Result includes the Cells or else it just has a count of Cells
        * that are carried otherwise.
        * 
+ * + * repeated .hbase.pb.Cell cell = 1; */ public Builder addCell( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell.Builder builderForValue) { @@ -5397,18 +5502,19 @@ public final class ClientProtos { return this; } /** - * repeated .hbase.pb.Cell cell = 1; - * *
        * Result includes the Cells or else it just has a count of Cells
        * that are carried otherwise.
        * 
+ * + * repeated .hbase.pb.Cell cell = 1; */ public Builder addAllCell( java.lang.Iterable values) { if (cellBuilder_ == null) { ensureCellIsMutable(); - super.addAll(values, cell_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, cell_); onChanged(); } else { cellBuilder_.addAllMessages(values); @@ -5416,12 +5522,12 @@ public final class ClientProtos { return this; } /** - * repeated .hbase.pb.Cell cell = 1; - * *
        * Result includes the Cells or else it just has a count of Cells
        * that are carried otherwise.
        * 
+ * + * repeated .hbase.pb.Cell cell = 1; */ public Builder clearCell() { if (cellBuilder_ == null) { @@ -5434,12 +5540,12 @@ public final class ClientProtos { return this; } /** - * repeated .hbase.pb.Cell cell = 1; - * *
        * Result includes the Cells or else it just has a count of Cells
        * that are carried otherwise.
        * 
+ * + * repeated .hbase.pb.Cell cell = 1; */ public Builder removeCell(int index) { if (cellBuilder_ == null) { @@ -5452,24 +5558,24 @@ public final class ClientProtos { return this; } /** - * repeated .hbase.pb.Cell cell = 1; - * *
        * Result includes the Cells or else it just has a count of Cells
        * that are carried otherwise.
        * 
+ * + * repeated .hbase.pb.Cell cell = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell.Builder getCellBuilder( int index) { return getCellFieldBuilder().getBuilder(index); } /** - * repeated .hbase.pb.Cell cell = 1; - * *
        * Result includes the Cells or else it just has a count of Cells
        * that are carried otherwise.
        * 
+ * + * repeated .hbase.pb.Cell cell = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellOrBuilder getCellOrBuilder( int index) { @@ -5479,12 +5585,12 @@ public final class ClientProtos { } } /** - * repeated .hbase.pb.Cell cell = 1; - * *
        * Result includes the Cells or else it just has a count of Cells
        * that are carried otherwise.
        * 
+ * + * repeated .hbase.pb.Cell cell = 1; */ public java.util.List getCellOrBuilderList() { @@ -5495,24 +5601,24 @@ public final class ClientProtos { } } /** - * repeated .hbase.pb.Cell cell = 1; - * *
        * Result includes the Cells or else it just has a count of Cells
        * that are carried otherwise.
        * 
+ * + * repeated .hbase.pb.Cell cell = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell.Builder addCellBuilder() { return getCellFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell.getDefaultInstance()); } /** - * repeated .hbase.pb.Cell cell = 1; - * *
        * Result includes the Cells or else it just has a count of Cells
        * that are carried otherwise.
        * 
+ * + * repeated .hbase.pb.Cell cell = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell.Builder addCellBuilder( int index) { @@ -5520,22 +5626,22 @@ public final class ClientProtos { index, org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell.getDefaultInstance()); } /** - * repeated .hbase.pb.Cell cell = 1; - * *
        * Result includes the Cells or else it just has a count of Cells
        * that are carried otherwise.
        * 
+ * + * repeated .hbase.pb.Cell cell = 1; */ public java.util.List getCellBuilderList() { return getCellFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell, org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellOrBuilder> getCellFieldBuilder() { if (cellBuilder_ == null) { - cellBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + cellBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell, org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellOrBuilder>( cell_, ((bitField0_ & 0x00000001) == 0x00000001), @@ -5546,11 +5652,8 @@ public final class ClientProtos { return cellBuilder_; } - // optional int32 associated_cell_count = 2; private int associatedCellCount_ ; /** - * optional int32 associated_cell_count = 2; - * *
        * The below count is set when the associated cells are
        * not part of this protobuf message; they are passed alongside
@@ -5559,13 +5662,13 @@ public final class ClientProtos {
        * ours.  NOTE: This is different from the pb managed cell_count of the
        * 'cell' field above which is non-null when the cells are pb'd.
        * 
+ * + * optional int32 associated_cell_count = 2; */ public boolean hasAssociatedCellCount() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional int32 associated_cell_count = 2; - * *
        * The below count is set when the associated cells are
        * not part of this protobuf message; they are passed alongside
@@ -5574,13 +5677,13 @@ public final class ClientProtos {
        * ours.  NOTE: This is different from the pb managed cell_count of the
        * 'cell' field above which is non-null when the cells are pb'd.
        * 
+ * + * optional int32 associated_cell_count = 2; */ public int getAssociatedCellCount() { return associatedCellCount_; } /** - * optional int32 associated_cell_count = 2; - * *
        * The below count is set when the associated cells are
        * not part of this protobuf message; they are passed alongside
@@ -5589,6 +5692,8 @@ public final class ClientProtos {
        * ours.  NOTE: This is different from the pb managed cell_count of the
        * 'cell' field above which is non-null when the cells are pb'd.
        * 
+ * + * optional int32 associated_cell_count = 2; */ public Builder setAssociatedCellCount(int value) { bitField0_ |= 0x00000002; @@ -5597,8 +5702,6 @@ public final class ClientProtos { return this; } /** - * optional int32 associated_cell_count = 2; - * *
        * The below count is set when the associated cells are
        * not part of this protobuf message; they are passed alongside
@@ -5607,6 +5710,8 @@ public final class ClientProtos {
        * ours.  NOTE: This is different from the pb managed cell_count of the
        * 'cell' field above which is non-null when the cells are pb'd.
        * 
+ * + * optional int32 associated_cell_count = 2; */ public Builder clearAssociatedCellCount() { bitField0_ = (bitField0_ & ~0x00000002); @@ -5615,37 +5720,36 @@ public final class ClientProtos { return this; } - // optional bool exists = 3; private boolean exists_ ; /** - * optional bool exists = 3; - * *
        * used for Get to check existence only. Not set if existence_only was not set to true
        *  in the query.
        * 
+ * + * optional bool exists = 3; */ public boolean hasExists() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * optional bool exists = 3; - * *
        * used for Get to check existence only. Not set if existence_only was not set to true
        *  in the query.
        * 
+ * + * optional bool exists = 3; */ public boolean getExists() { return exists_; } /** - * optional bool exists = 3; - * *
        * used for Get to check existence only. Not set if existence_only was not set to true
        *  in the query.
        * 
+ * + * optional bool exists = 3; */ public Builder setExists(boolean value) { bitField0_ |= 0x00000004; @@ -5654,12 +5758,12 @@ public final class ClientProtos { return this; } /** - * optional bool exists = 3; - * *
        * used for Get to check existence only. Not set if existence_only was not set to true
        *  in the query.
        * 
+ * + * optional bool exists = 3; */ public Builder clearExists() { bitField0_ = (bitField0_ & ~0x00000004); @@ -5668,34 +5772,33 @@ public final class ClientProtos { return this; } - // optional bool stale = 4 [default = false]; private boolean stale_ ; /** - * optional bool stale = 4 [default = false]; - * *
        * Whether or not the results are coming from possibly stale data 
        * 
+ * + * optional bool stale = 4 [default = false]; */ public boolean hasStale() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** - * optional bool stale = 4 [default = false]; - * *
        * Whether or not the results are coming from possibly stale data 
        * 
+ * + * optional bool stale = 4 [default = false]; */ public boolean getStale() { return stale_; } /** - * optional bool stale = 4 [default = false]; - * *
        * Whether or not the results are coming from possibly stale data 
        * 
+ * + * optional bool stale = 4 [default = false]; */ public Builder setStale(boolean value) { bitField0_ |= 0x00000008; @@ -5704,11 +5807,11 @@ public final class ClientProtos { return this; } /** - * optional bool stale = 4 [default = false]; - * *
        * Whether or not the results are coming from possibly stale data 
        * 
+ * + * optional bool stale = 4 [default = false]; */ public Builder clearStale() { bitField0_ = (bitField0_ & ~0x00000008); @@ -5717,43 +5820,42 @@ public final class ClientProtos { return this; } - // optional bool partial = 5 [default = false]; private boolean partial_ ; /** - * optional bool partial = 5 [default = false]; - * *
        * Whether or not the entire result could be returned. Results will be split when
        * the RPC chunk size limit is reached. Partial results contain only a subset of the
        * cells for a row and must be combined with a result containing the remaining cells
        * to form a complete result
        * 
+ * + * optional bool partial = 5 [default = false]; */ public boolean hasPartial() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** - * optional bool partial = 5 [default = false]; - * *
        * Whether or not the entire result could be returned. Results will be split when
        * the RPC chunk size limit is reached. Partial results contain only a subset of the
        * cells for a row and must be combined with a result containing the remaining cells
        * to form a complete result
        * 
+ * + * optional bool partial = 5 [default = false]; */ public boolean getPartial() { return partial_; } /** - * optional bool partial = 5 [default = false]; - * *
        * Whether or not the entire result could be returned. Results will be split when
        * the RPC chunk size limit is reached. Partial results contain only a subset of the
        * cells for a row and must be combined with a result containing the remaining cells
        * to form a complete result
        * 
+ * + * optional bool partial = 5 [default = false]; */ public Builder setPartial(boolean value) { bitField0_ |= 0x00000010; @@ -5762,14 +5864,14 @@ public final class ClientProtos { return this; } /** - * optional bool partial = 5 [default = false]; - * *
        * Whether or not the entire result could be returned. Results will be split when
        * the RPC chunk size limit is reached. Partial results contain only a subset of the
        * cells for a row and must be combined with a result containing the remaining cells
        * to form a complete result
        * 
+ * + * optional bool partial = 5 [default = false]; */ public Builder clearPartial() { bitField0_ = (bitField0_ & ~0x00000010); @@ -5777,22 +5879,59 @@ public final class ClientProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.Result) } + // @@protoc_insertion_point(class_scope:hbase.pb.Result) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result DEFAULT_INSTANCE; static { - defaultInstance = new Result(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public Result parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Result(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.Result) } - public interface GetRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface GetRequestOrBuilder extends + // 
@@protoc_insertion_point(interface_extends:hbase.pb.GetRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionSpecifier region = 1; /** * required .hbase.pb.RegionSpecifier region = 1; */ @@ -5806,7 +5945,6 @@ public final class ClientProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - // required .hbase.pb.Get get = 2; /** * required .hbase.pb.Get get = 2; */ @@ -5821,43 +5959,34 @@ public final class ClientProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder(); } /** - * Protobuf type {@code hbase.pb.GetRequest} - * *
    **
    * The get request. Perform a single Get operation.
    * 
+ * + * Protobuf type {@code hbase.pb.GetRequest} */ - public static final class GetRequest extends - com.google.protobuf.GeneratedMessage - implements GetRequestOrBuilder { + public static final class GetRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetRequest) + GetRequestOrBuilder { // Use GetRequest.newBuilder() to construct. - private GetRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private GetRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final GetRequest defaultInstance; - public static GetRequest getDefaultInstance() { - return defaultInstance; + private GetRequest() { } - public GetRequest getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private GetRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -5908,7 +6037,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -5919,30 +6048,14 @@ public final class ClientProtos { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** @@ -5955,16 +6068,15 @@ public final class ClientProtos { * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } /** * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } - // required .hbase.pb.Get get = 2; public static final int GET_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get get_; /** @@ -5977,23 +6089,20 @@ public final class ClientProtos { * required .hbase.pb.Get get = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get getGet() { - return get_; + return get_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance() : get_; } /** * required .hbase.pb.Get get = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() { - return get_; + return get_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance() : get_; } - private void initFields() { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - get_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRegion()) { memoizedIsInitialized = 0; @@ -6017,43 +6126,35 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); + output.writeMessage(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, get_); + output.writeMessage(2, getGet()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - 
int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); + .computeMessageSize(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, get_); + .computeMessageSize(2, getGet()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -6074,12 +6175,10 @@ public final class ClientProtos { result = result && getGet() .equals(other.getGet()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -6095,7 +6194,7 @@ public final class ClientProtos { hash = (37 * hash) + GET_FIELD_NUMBER; hash = (53 * hash) + getGet().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -6123,66 +6222,78 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest parseFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest 
prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.GetRequest} - * *
      **
      * The get request. Perform a single Get operation.
      * 
+ * + * Protobuf type {@code hbase.pb.GetRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -6195,30 +6306,27 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionFieldBuilder(); getGetFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (getBuilder_ == null) { - get_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance(); + get_ = null; } else { 
getBuilder_.clear(); } @@ -6226,10 +6334,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetRequest_descriptor; @@ -6272,6 +6376,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest)other); @@ -6289,25 +6419,22 @@ public final class ClientProtos { if (other.hasGet()) { mergeGet(other.getGet()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasRegion()) { - return false; } if (!hasGet()) { - return false; } if (!getRegion().isInitialized()) { - return false; } if (!getGet().isInitialized()) { - return 
false; } return true; @@ -6322,7 +6449,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -6332,9 +6459,8 @@ public final class ClientProtos { } private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * required .hbase.pb.RegionSpecifier region = 1; @@ -6347,7 +6473,7 @@ public final class ClientProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } @@ -6388,6 +6514,7 @@ public final class ClientProtos { public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != null && region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); @@ -6406,7 +6533,7 @@ public final class ClientProtos { */ public Builder clearRegion() { if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; onChanged(); } else { regionBuilder_.clear(); @@ -6429,19 +6556,20 @@ public final class ClientProtos { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { - return region_; + return region_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } } /** * required .hbase.pb.RegionSpecifier region = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, + getRegion(), getParentForChildren(), isClean()); region_ = null; @@ -6449,9 +6577,8 @@ public final class ClientProtos { return regionBuilder_; } - // required .hbase.pb.Get get = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get get_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get get_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetOrBuilder> getBuilder_; /** * required .hbase.pb.Get get = 2; @@ -6464,7 +6591,7 @@ public final class ClientProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get getGet() { if (getBuilder_ == null) { - return get_; + return get_ 
== null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance() : get_; } else { return getBuilder_.getMessage(); } @@ -6505,6 +6632,7 @@ public final class ClientProtos { public Builder mergeGet(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get value) { if (getBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + get_ != null && get_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance()) { get_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.newBuilder(get_).mergeFrom(value).buildPartial(); @@ -6523,7 +6651,7 @@ public final class ClientProtos { */ public Builder clearGet() { if (getBuilder_ == null) { - get_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance(); + get_ = null; onChanged(); } else { getBuilder_.clear(); @@ -6546,41 +6674,79 @@ public final class ClientProtos { if (getBuilder_ != null) { return getBuilder_.getMessageOrBuilder(); } else { - return get_; + return get_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance() : get_; } } /** * required .hbase.pb.Get get = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetOrBuilder> getGetFieldBuilder() { if (getBuilder_ == null) { - getBuilder_ = new com.google.protobuf.SingleFieldBuilder< + getBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetOrBuilder>( - get_, + getGet(), getParentForChildren(), isClean()); get_ = null; } return getBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.GetRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.GetRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest DEFAULT_INSTANCE; static { - defaultInstance = new GetRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GetRequest 
parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.GetRequest) } - public interface GetResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface GetResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GetResponse) + com.google.protobuf.MessageOrBuilder { - // optional .hbase.pb.Result result = 1; /** * optional .hbase.pb.Result result = 1; */ @@ -6597,36 +6763,27 @@ public final class ClientProtos { /** * Protobuf type {@code hbase.pb.GetResponse} */ - public static final class GetResponse extends - com.google.protobuf.GeneratedMessage - implements GetResponseOrBuilder { + public static final class GetResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetResponse) + GetResponseOrBuilder { // Use GetResponse.newBuilder() to construct. 
- private GetResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private GetResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final GetResponse defaultInstance; - public static GetResponse getDefaultInstance() { - return defaultInstance; } - - public GetResponse getDefaultInstanceForType() { - return defaultInstance; + private GetResponse() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private GetResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -6664,7 +6821,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -6675,30 +6832,14 @@ public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetResponse_fieldAccessorTable 
.ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional .hbase.pb.Result result = 1; public static final int RESULT_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result result_; /** @@ -6711,22 +6852,20 @@ public final class ClientProtos { * optional .hbase.pb.Result result = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result getResult() { - return result_; + return result_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance() : result_; } /** * optional .hbase.pb.Result result = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { - return result_; + return result_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance() : result_; } - private void initFields() { - result_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -6734,36 +6873,28 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, result_); + output.writeMessage(1, getResult()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, result_); + .computeMessageSize(1, getResult()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -6779,12 +6910,10 @@ public final class ClientProtos { result = result && getResult() .equals(other.getResult()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; 
} - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -6796,7 +6925,7 @@ public final class ClientProtos { hash = (37 * hash) + RESULT_FIELD_NUMBER; hash = (53 * hash) + getResult().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -6824,46 +6953,57 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException 
{ - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -6871,14 +7011,15 @@ public final class ClientProtos { * Protobuf type {@code hbase.pb.GetResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -6891,23 +7032,20 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getResultFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (resultBuilder_ 
== null) { - result_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance(); + result_ = null; } else { resultBuilder_.clear(); } @@ -6915,10 +7053,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetResponse_descriptor; @@ -6953,6 +7087,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse)other); @@ -6967,7 +7127,8 @@ public final class ClientProtos { if (other.hasResult()) { mergeResult(other.getResult()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -6984,7 +7145,7 @@ public final class ClientProtos 
{ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -6994,9 +7155,8 @@ public final class ClientProtos { } private int bitField0_; - // optional .hbase.pb.Result result = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result result_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result result_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_; /** * optional .hbase.pb.Result result = 1; @@ -7009,7 +7169,7 @@ public final class ClientProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result getResult() { if (resultBuilder_ == null) { - return result_; + return result_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance() : result_; } else { return resultBuilder_.getMessage(); } @@ -7050,6 +7210,7 @@ public final class ClientProtos { public Builder mergeResult(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result value) { if (resultBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + result_ != null && result_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance()) { result_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial(); @@ -7068,7 +7229,7 @@ public final class ClientProtos { */ public Builder clearResult() { if (resultBuilder_ == null) { - result_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance(); + result_ = null; onChanged(); } else { resultBuilder_.clear(); @@ -7091,41 +7252,79 @@ public final class ClientProtos { if (resultBuilder_ != null) { return resultBuilder_.getMessageOrBuilder(); } else { - return result_; + return result_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance() : result_; } } /** * optional .hbase.pb.Result result = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder> getResultFieldBuilder() { if (resultBuilder_ == null) { - resultBuilder_ = new com.google.protobuf.SingleFieldBuilder< + resultBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder>( - result_, + getResult(), getParentForChildren(), isClean()); result_ = null; } return resultBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.GetResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.GetResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse DEFAULT_INSTANCE; static { - defaultInstance = new GetResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new 
com.google.protobuf.AbstractParser() { + public GetResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.GetResponse) } - public interface ConditionOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ConditionOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.Condition) + com.google.protobuf.MessageOrBuilder { - // required bytes row = 1; /** * required bytes row = 1; */ @@ -7135,7 +7334,6 @@ public final class ClientProtos { */ com.google.protobuf.ByteString getRow(); - // required bytes family = 2; /** * required bytes family = 2; */ @@ -7145,7 +7343,6 @@ public final class ClientProtos { */ com.google.protobuf.ByteString getFamily(); - // required bytes qualifier = 3; /** * required bytes qualifier = 3; */ @@ -7155,7 +7352,6 @@ public final class ClientProtos { */ com.google.protobuf.ByteString getQualifier(); - // required .hbase.pb.CompareType compare_type = 4; /** * required .hbase.pb.CompareType compare_type = 4; */ @@ -7165,7 +7361,6 @@ public final class ClientProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType getCompareType(); - // required .hbase.pb.Comparator comparator = 5; /** * required .hbase.pb.Comparator comparator = 5; */ @@ -7180,46 +7375,40 @@ public final class ClientProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder(); } 
/** - * Protobuf type {@code hbase.pb.Condition} - * *
    **
    * Condition to check if the value of a given cell (row,
    * family, qualifier) matches a value via a given comparator.
-   *
    * Condition is used in check and mutate operations.
    * 
+ * + * Protobuf type {@code hbase.pb.Condition} */ - public static final class Condition extends - com.google.protobuf.GeneratedMessage - implements ConditionOrBuilder { + public static final class Condition extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.Condition) + ConditionOrBuilder { // Use Condition.newBuilder() to construct. - private Condition(com.google.protobuf.GeneratedMessage.Builder builder) { + private Condition(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private Condition(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final Condition defaultInstance; - public static Condition getDefaultInstance() { - return defaultInstance; } - - public Condition getDefaultInstanceForType() { - return defaultInstance; + private Condition() { + row_ = com.google.protobuf.ByteString.EMPTY; + family_ = com.google.protobuf.ByteString.EMPTY; + qualifier_ = com.google.protobuf.ByteString.EMPTY; + compareType_ = 0; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private Condition( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -7260,7 +7449,7 @@ public final class ClientProtos { unknownFields.mergeVarintField(4, rawValue); } else { bitField0_ |= 0x00000008; - compareType_ = value; + compareType_ = rawValue; } break; } @@ -7283,7 +7472,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); 
} catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -7294,30 +7483,14 @@ public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Condition_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Condition_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public Condition parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new Condition(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bytes row = 1; public static final int ROW_FIELD_NUMBER = 1; private com.google.protobuf.ByteString row_; /** @@ -7333,7 +7506,6 @@ public final class ClientProtos { return row_; } - // required bytes family = 2; public static final int FAMILY_FIELD_NUMBER = 2; private com.google.protobuf.ByteString family_; /** @@ -7349,7 +7521,6 @@ public final class ClientProtos { return family_; } - // required bytes qualifier = 3; public static final int QUALIFIER_FIELD_NUMBER = 3; private com.google.protobuf.ByteString qualifier_; /** @@ -7365,9 +7536,8 @@ 
public final class ClientProtos { return qualifier_; } - // required .hbase.pb.CompareType compare_type = 4; public static final int COMPARE_TYPE_FIELD_NUMBER = 4; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType compareType_; + private int compareType_; /** * required .hbase.pb.CompareType compare_type = 4; */ @@ -7378,10 +7548,10 @@ public final class ClientProtos { * required .hbase.pb.CompareType compare_type = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType getCompareType() { - return compareType_; + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType result = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType.valueOf(compareType_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType.LESS : result; } - // required .hbase.pb.Comparator comparator = 5; public static final int COMPARATOR_FIELD_NUMBER = 5; private org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator comparator_; /** @@ -7394,26 +7564,20 @@ public final class ClientProtos { * required .hbase.pb.Comparator comparator = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator getComparator() { - return comparator_; + return comparator_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance() : comparator_; } /** * required .hbase.pb.Comparator comparator = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() { - return comparator_; + return comparator_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance() : comparator_; } - private void initFields() { - row_ = com.google.protobuf.ByteString.EMPTY; - family_ = com.google.protobuf.ByteString.EMPTY; - qualifier_ = com.google.protobuf.ByteString.EMPTY; - compareType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType.LESS; - comparator_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRow()) { memoizedIsInitialized = 0; @@ -7445,7 +7609,6 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, row_); } @@ -7456,17 +7619,16 @@ public final class ClientProtos { output.writeBytes(3, qualifier_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeEnum(4, compareType_.getNumber()); + output.writeEnum(4, compareType_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeMessage(5, comparator_); + output.writeMessage(5, getComparator()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -7484,25 +7646,19 @@ public final class ClientProtos { } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(4, compareType_.getNumber()); + .computeEnumSize(4, compareType_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += 
com.google.protobuf.CodedOutputStream - .computeMessageSize(5, comparator_); + .computeMessageSize(5, getComparator()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -7530,20 +7686,17 @@ public final class ClientProtos { } result = result && (hasCompareType() == other.hasCompareType()); if (hasCompareType()) { - result = result && - (getCompareType() == other.getCompareType()); + result = result && compareType_ == other.compareType_; } result = result && (hasComparator() == other.hasComparator()); if (hasComparator()) { result = result && getComparator() .equals(other.getComparator()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -7565,13 +7718,13 @@ public final class ClientProtos { } if (hasCompareType()) { hash = (37 * hash) + COMPARE_TYPE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getCompareType()); + hash = (53 * hash) + compareType_; } if (hasComparator()) { hash = (37 * hash) + COMPARATOR_FIELD_NUMBER; hash = (53 * hash) + getComparator().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -7599,69 +7752,80 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition parseFrom(java.io.InputStream input) throws java.io.IOException { - return 
PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return 
newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.Condition} - * *
      **
      * Condition to check if the value of a given cell (row,
      * family, qualifier) matches a value via a given comparator.
-     *
      * Condition is used in check and mutate operations.
      * 
+ * + * Protobuf type {@code hbase.pb.Condition} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ConditionOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.Condition) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ConditionOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Condition_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Condition_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -7674,19 +7838,16 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getComparatorFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); row_ = com.google.protobuf.ByteString.EMPTY; @@ -7695,10 +7856,10 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000002); qualifier_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); - compareType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType.LESS; + compareType_ = 0; bitField0_ = (bitField0_ & ~0x00000008); if (comparatorBuilder_ == null) { 
- comparator_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); + comparator_ = null; } else { comparatorBuilder_.clear(); } @@ -7706,10 +7867,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Condition_descriptor; @@ -7760,6 +7917,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition)other); @@ -7786,33 +7969,28 @@ public final class ClientProtos { if (other.hasComparator()) { mergeComparator(other.getComparator()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() 
{ if (!hasRow()) { - return false; } if (!hasFamily()) { - return false; } if (!hasQualifier()) { - return false; } if (!hasCompareType()) { - return false; } if (!hasComparator()) { - return false; } if (!getComparator().isInitialized()) { - return false; } return true; @@ -7827,7 +8005,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -7837,7 +8015,6 @@ public final class ClientProtos { } private int bitField0_; - // required bytes row = 1; private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes row = 1; @@ -7873,7 +8050,6 @@ public final class ClientProtos { return this; } - // required bytes family = 2; private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes family = 2; @@ -7909,7 +8085,6 @@ public final class ClientProtos { return this; } - // required bytes qualifier = 3; private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes qualifier = 3; @@ -7945,8 +8120,7 @@ public final class ClientProtos { return this; } - // required .hbase.pb.CompareType compare_type = 4; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType compareType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType.LESS; + private int compareType_ = 0; /** * required .hbase.pb.CompareType compare_type = 4; */ @@ -7957,7 +8131,8 @@ public final class ClientProtos { * required .hbase.pb.CompareType compare_type = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType getCompareType() { - return compareType_; + 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType result = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType.valueOf(compareType_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType.LESS : result; } /** * required .hbase.pb.CompareType compare_type = 4; @@ -7967,7 +8142,7 @@ public final class ClientProtos { throw new NullPointerException(); } bitField0_ |= 0x00000008; - compareType_ = value; + compareType_ = value.getNumber(); onChanged(); return this; } @@ -7976,14 +8151,13 @@ public final class ClientProtos { */ public Builder clearCompareType() { bitField0_ = (bitField0_ & ~0x00000008); - compareType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType.LESS; + compareType_ = 0; onChanged(); return this; } - // required .hbase.pb.Comparator comparator = 5; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator comparator_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator comparator_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> comparatorBuilder_; /** * required .hbase.pb.Comparator comparator = 5; @@ -7996,7 +8170,7 @@ public final class ClientProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator getComparator() { if (comparatorBuilder_ == null) { - return comparator_; + return comparator_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance() : comparator_; } else { return comparatorBuilder_.getMessage(); } @@ -8037,6 +8211,7 @@ public final class ClientProtos { public Builder mergeComparator(org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator value) { if (comparatorBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010) && + comparator_ != null && comparator_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance()) { comparator_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.newBuilder(comparator_).mergeFrom(value).buildPartial(); @@ -8055,7 +8230,7 @@ public final class ClientProtos { */ public Builder clearComparator() { if (comparatorBuilder_ == null) { - comparator_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); + comparator_ = null; onChanged(); } else { comparatorBuilder_.clear(); @@ -8078,41 +8253,79 @@ public final class ClientProtos { if (comparatorBuilder_ != null) { return comparatorBuilder_.getMessageOrBuilder(); } else { - return comparator_; + return comparator_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance() : comparator_; } } /** * required .hbase.pb.Comparator comparator = 5; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> getComparatorFieldBuilder() { if (comparatorBuilder_ == null) { - comparatorBuilder_ = new com.google.protobuf.SingleFieldBuilder< + comparatorBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>( - comparator_, + getComparator(), getParentForChildren(), isClean()); comparator_ = null; } return comparatorBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.Condition) } + // @@protoc_insertion_point(class_scope:hbase.pb.Condition) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition DEFAULT_INSTANCE; static { - defaultInstance = new Condition(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + 
@java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public Condition parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Condition(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.Condition) } - public interface MutationProtoOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface MutationProtoOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.MutationProto) + com.google.protobuf.MessageOrBuilder { - // optional bytes row = 1; /** * optional bytes row = 1; */ @@ -8122,7 +8335,6 @@ public final class ClientProtos { */ com.google.protobuf.ByteString getRow(); - // optional .hbase.pb.MutationProto.MutationType mutate_type = 2; /** * optional .hbase.pb.MutationProto.MutationType mutate_type = 2; */ @@ -8132,7 +8344,6 @@ public final class ClientProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType getMutateType(); - // repeated .hbase.pb.MutationProto.ColumnValue column_value = 3; /** * repeated .hbase.pb.MutationProto.ColumnValue column_value = 3; */ @@ -8157,7 +8368,6 @@ public final class ClientProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder getColumnValueOrBuilder( int index); - // optional uint64 timestamp = 4; /** * optional uint64 timestamp = 4; */ @@ -8167,7 +8377,6 @@ public final class ClientProtos { */ long getTimestamp(); - // repeated 
.hbase.pb.NameBytesPair attribute = 5; /** * repeated .hbase.pb.NameBytesPair attribute = 5; */ @@ -8192,7 +8401,6 @@ public final class ClientProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( int index); - // optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT]; /** * optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT]; */ @@ -8202,39 +8410,35 @@ public final class ClientProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Durability getDurability(); - // optional .hbase.pb.TimeRange time_range = 7; /** - * optional .hbase.pb.TimeRange time_range = 7; - * *
      * For some mutations, a result may be returned, in which case,
      * time range can be specified for potential performance gain
      * 
+ * + * optional .hbase.pb.TimeRange time_range = 7; */ boolean hasTimeRange(); /** - * optional .hbase.pb.TimeRange time_range = 7; - * *
      * For some mutations, a result may be returned, in which case,
      * time range can be specified for potential performance gain
      * 
+ * + * optional .hbase.pb.TimeRange time_range = 7; */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange getTimeRange(); /** - * optional .hbase.pb.TimeRange time_range = 7; - * *
      * For some mutations, a result may be returned, in which case,
      * time range can be specified for potential performance gain
      * 
+ * + * optional .hbase.pb.TimeRange time_range = 7; */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder(); - // optional int32 associated_cell_count = 8; /** - * optional int32 associated_cell_count = 8; - * *
      * The below count is set when the associated cells are NOT
      * part of this protobuf message; they are passed alongside
@@ -8243,11 +8447,11 @@ public final class ClientProtos {
      * ours.  NOTE: This is different from the pb managed cell_count of the
      * 'cell' field above which is non-null when the cells are pb'd.
      * 
+ * + * optional int32 associated_cell_count = 8; */ boolean hasAssociatedCellCount(); /** - * optional int32 associated_cell_count = 8; - * *
      * The below count is set when the associated cells are NOT
      * part of this protobuf message; they are passed alongside
@@ -8256,10 +8460,11 @@ public final class ClientProtos {
      * ours.  NOTE: This is different from the pb managed cell_count of the
      * 'cell' field above which is non-null when the cells are pb'd.
      * 
+ * + * optional int32 associated_cell_count = 8; */ int getAssociatedCellCount(); - // optional uint64 nonce = 9; /** * optional uint64 nonce = 9; */ @@ -8270,8 +8475,6 @@ public final class ClientProtos { long getNonce(); } /** - * Protobuf type {@code hbase.pb.MutationProto} - * *
    **
    * A specific mutation inside a mutate request.
@@ -8280,37 +8483,38 @@ public final class ClientProtos {
    * only metadata present because data is being carried
    * elsewhere outside of pb.
    * 
+ * + * Protobuf type {@code hbase.pb.MutationProto} */ - public static final class MutationProto extends - com.google.protobuf.GeneratedMessage - implements MutationProtoOrBuilder { + public static final class MutationProto extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.MutationProto) + MutationProtoOrBuilder { // Use MutationProto.newBuilder() to construct. - private MutationProto(com.google.protobuf.GeneratedMessage.Builder builder) { + private MutationProto(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private MutationProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final MutationProto defaultInstance; - public static MutationProto getDefaultInstance() { - return defaultInstance; } - - public MutationProto getDefaultInstanceForType() { - return defaultInstance; + private MutationProto() { + row_ = com.google.protobuf.ByteString.EMPTY; + mutateType_ = 0; + columnValue_ = java.util.Collections.emptyList(); + timestamp_ = 0L; + attribute_ = java.util.Collections.emptyList(); + durability_ = 0; + associatedCellCount_ = 0; + nonce_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private MutationProto( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -8341,7 +8545,7 @@ public final class ClientProtos { unknownFields.mergeVarintField(2, rawValue); } else { bitField0_ |= 0x00000002; - mutateType_ 
= value; + mutateType_ = rawValue; } break; } @@ -8350,7 +8554,8 @@ public final class ClientProtos { columnValue_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000004; } - columnValue_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.PARSER, extensionRegistry)); + columnValue_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.PARSER, extensionRegistry)); break; } case 32: { @@ -8363,7 +8568,8 @@ public final class ClientProtos { attribute_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000010; } - attribute_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry)); + attribute_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry)); break; } case 48: { @@ -8373,7 +8579,7 @@ public final class ClientProtos { unknownFields.mergeVarintField(6, rawValue); } else { bitField0_ |= 0x00000008; - durability_ = value; + durability_ = rawValue; } break; } @@ -8406,7 +8612,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { columnValue_ = java.util.Collections.unmodifiableList(columnValue_); @@ -8423,28 +8629,13 @@ public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public MutationProto parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new MutationProto(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - /** * Protobuf enum {@code hbase.pb.MutationProto.Durability} */ @@ -8453,23 +8644,23 @@ public final class ClientProtos { /** * USE_DEFAULT = 0; */ - USE_DEFAULT(0, 0), + USE_DEFAULT(0), /** * SKIP_WAL = 1; */ - SKIP_WAL(1, 1), + SKIP_WAL(1), /** * ASYNC_WAL = 2; */ - ASYNC_WAL(2, 2), + ASYNC_WAL(2), /** * SYNC_WAL = 3; */ - SYNC_WAL(3, 3), + SYNC_WAL(3), /** * FSYNC_WAL = 4; */ - FSYNC_WAL(4, 4), + FSYNC_WAL(4), ; /** @@ -8494,9 +8685,19 @@ public final class ClientProtos { public static final int FSYNC_WAL_VALUE = 4; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. 
+ */ + @java.lang.Deprecated public static Durability valueOf(int value) { + return forNumber(value); + } + + public static Durability forNumber(int value) { switch (value) { case 0: return USE_DEFAULT; case 1: return SKIP_WAL; @@ -8511,17 +8712,17 @@ public final class ClientProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + Durability> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public Durability findValueByNumber(int number) { - return Durability.valueOf(number); + return Durability.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -8543,11 +8744,9 @@ public final class ClientProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private Durability(int index, int value) { - this.index = index; + private Durability(int value) { this.value = value; } @@ -8562,19 +8761,19 @@ public final class ClientProtos { /** * APPEND = 0; */ - APPEND(0, 0), + APPEND(0), /** * INCREMENT = 1; */ - INCREMENT(1, 1), + INCREMENT(1), /** * PUT = 2; */ - PUT(2, 2), + PUT(2), /** * DELETE = 3; */ - DELETE(3, 3), + DELETE(3), ; /** @@ -8595,9 +8794,19 @@ public final class ClientProtos { public static final int DELETE_VALUE = 3; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. 
+ */ + @java.lang.Deprecated public static MutationType valueOf(int value) { + return forNumber(value); + } + + public static MutationType forNumber(int value) { switch (value) { case 0: return APPEND; case 1: return INCREMENT; @@ -8611,17 +8820,17 @@ public final class ClientProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + MutationType> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public MutationType findValueByNumber(int number) { - return MutationType.valueOf(number); + return MutationType.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -8643,11 +8852,9 @@ public final class ClientProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private MutationType(int index, int value) { - this.index = index; + private MutationType(int value) { this.value = value; } @@ -8662,19 +8869,19 @@ public final class ClientProtos { /** * DELETE_ONE_VERSION = 0; */ - DELETE_ONE_VERSION(0, 0), + DELETE_ONE_VERSION(0), /** * DELETE_MULTIPLE_VERSIONS = 1; */ - DELETE_MULTIPLE_VERSIONS(1, 1), + DELETE_MULTIPLE_VERSIONS(1), /** * DELETE_FAMILY = 2; */ - DELETE_FAMILY(2, 2), + DELETE_FAMILY(2), /** * DELETE_FAMILY_VERSION = 3; */ - DELETE_FAMILY_VERSION(3, 3), + DELETE_FAMILY_VERSION(3), ; /** @@ -8695,9 +8902,19 @@ public final class ClientProtos { public static final int DELETE_FAMILY_VERSION_VALUE = 3; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. 
+ */ + @java.lang.Deprecated public static DeleteType valueOf(int value) { + return forNumber(value); + } + + public static DeleteType forNumber(int value) { switch (value) { case 0: return DELETE_ONE_VERSION; case 1: return DELETE_MULTIPLE_VERSIONS; @@ -8711,17 +8928,17 @@ public final class ClientProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + DeleteType> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public DeleteType findValueByNumber(int number) { - return DeleteType.valueOf(number); + return DeleteType.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -8743,21 +8960,19 @@ public final class ClientProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private DeleteType(int index, int value) { - this.index = index; + private DeleteType(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.MutationProto.DeleteType) } - public interface ColumnValueOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ColumnValueOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.MutationProto.ColumnValue) + com.google.protobuf.MessageOrBuilder { - // required bytes family = 1; /** * required bytes family = 1; */ @@ -8767,7 +8982,6 @@ public final class ClientProtos { */ com.google.protobuf.ByteString getFamily(); - // repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2; /** * repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2; */ @@ -8795,36 +9009,29 @@ public final class 
ClientProtos { /** * Protobuf type {@code hbase.pb.MutationProto.ColumnValue} */ - public static final class ColumnValue extends - com.google.protobuf.GeneratedMessage - implements ColumnValueOrBuilder { + public static final class ColumnValue extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.MutationProto.ColumnValue) + ColumnValueOrBuilder { // Use ColumnValue.newBuilder() to construct. - private ColumnValue(com.google.protobuf.GeneratedMessage.Builder builder) { + private ColumnValue(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ColumnValue(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ColumnValue defaultInstance; - public static ColumnValue getDefaultInstance() { - return defaultInstance; } - - public ColumnValue getDefaultInstanceForType() { - return defaultInstance; + private ColumnValue() { + family_ = com.google.protobuf.ByteString.EMPTY; + qualifierValue_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ColumnValue( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -8853,7 +9060,8 @@ public final class ClientProtos { qualifierValue_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } - 
qualifierValue_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.PARSER, extensionRegistry)); + qualifierValue_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.PARSER, extensionRegistry)); break; } } @@ -8862,7 +9070,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { qualifierValue_ = java.util.Collections.unmodifiableList(qualifierValue_); @@ -8876,32 +9084,17 @@ public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ColumnValue parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ColumnValue(input, extensionRegistry); - } - }; + public interface QualifierValueOrBuilder extends + // 
@@protoc_insertion_point(interface_extends:hbase.pb.MutationProto.ColumnValue.QualifierValue) + com.google.protobuf.MessageOrBuilder { - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public interface QualifierValueOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional bytes qualifier = 1; /** * optional bytes qualifier = 1; */ @@ -8911,7 +9104,6 @@ public final class ClientProtos { */ com.google.protobuf.ByteString getQualifier(); - // optional bytes value = 2; /** * optional bytes value = 2; */ @@ -8921,7 +9113,6 @@ public final class ClientProtos { */ com.google.protobuf.ByteString getValue(); - // optional uint64 timestamp = 3; /** * optional uint64 timestamp = 3; */ @@ -8931,7 +9122,6 @@ public final class ClientProtos { */ long getTimestamp(); - // optional .hbase.pb.MutationProto.DeleteType delete_type = 4; /** * optional .hbase.pb.MutationProto.DeleteType delete_type = 4; */ @@ -8941,7 +9131,6 @@ public final class ClientProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType getDeleteType(); - // optional bytes tags = 5; /** * optional bytes tags = 5; */ @@ -8954,36 +9143,32 @@ public final class ClientProtos { /** * Protobuf type {@code hbase.pb.MutationProto.ColumnValue.QualifierValue} */ - public static final class QualifierValue extends - com.google.protobuf.GeneratedMessage - implements QualifierValueOrBuilder { + public static final class QualifierValue extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.MutationProto.ColumnValue.QualifierValue) + QualifierValueOrBuilder { // Use QualifierValue.newBuilder() to construct. 
- private QualifierValue(com.google.protobuf.GeneratedMessage.Builder builder) { + private QualifierValue(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private QualifierValue(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final QualifierValue defaultInstance; - public static QualifierValue getDefaultInstance() { - return defaultInstance; } - - public QualifierValue getDefaultInstanceForType() { - return defaultInstance; + private QualifierValue() { + qualifier_ = com.google.protobuf.ByteString.EMPTY; + value_ = com.google.protobuf.ByteString.EMPTY; + timestamp_ = 0L; + deleteType_ = 0; + tags_ = com.google.protobuf.ByteString.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private QualifierValue( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -9024,7 +9209,7 @@ public final class ClientProtos { unknownFields.mergeVarintField(4, rawValue); } else { bitField0_ |= 0x00000008; - deleteType_ = value; + deleteType_ = rawValue; } break; } @@ -9039,7 +9224,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -9050,30 +9235,14 @@ public final class ClientProtos { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public QualifierValue parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new QualifierValue(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional bytes qualifier = 1; public static final int QUALIFIER_FIELD_NUMBER = 1; private com.google.protobuf.ByteString qualifier_; /** @@ -9089,7 +9258,6 @@ public final class ClientProtos { return qualifier_; } - // optional bytes value = 2; public static final int VALUE_FIELD_NUMBER = 2; private com.google.protobuf.ByteString value_; /** @@ -9105,7 +9273,6 @@ public final class ClientProtos { return value_; } - // optional uint64 timestamp = 3; public static final int TIMESTAMP_FIELD_NUMBER = 3; private long timestamp_; /** @@ -9121,9 +9288,8 @@ public final class ClientProtos { return timestamp_; } - // optional .hbase.pb.MutationProto.DeleteType delete_type = 4; public static final int DELETE_TYPE_FIELD_NUMBER = 4; - private 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType deleteType_; + private int deleteType_; /** * optional .hbase.pb.MutationProto.DeleteType delete_type = 4; */ @@ -9134,10 +9300,10 @@ public final class ClientProtos { * optional .hbase.pb.MutationProto.DeleteType delete_type = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType getDeleteType() { - return deleteType_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType result = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType.valueOf(deleteType_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION : result; } - // optional bytes tags = 5; public static final int TAGS_FIELD_NUMBER = 5; private com.google.protobuf.ByteString tags_; /** @@ -9153,17 +9319,11 @@ public final class ClientProtos { return tags_; } - private void initFields() { - qualifier_ = com.google.protobuf.ByteString.EMPTY; - value_ = com.google.protobuf.ByteString.EMPTY; - timestamp_ = 0L; - deleteType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION; - tags_ = com.google.protobuf.ByteString.EMPTY; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -9171,7 +9331,6 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, qualifier_); } @@ -9182,17 +9341,16 @@ public final class ClientProtos { output.writeUInt64(3, timestamp_); } if 
(((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeEnum(4, deleteType_.getNumber()); + output.writeEnum(4, deleteType_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBytes(5, tags_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -9210,25 +9368,19 @@ public final class ClientProtos { } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(4, deleteType_.getNumber()); + .computeEnumSize(4, deleteType_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(5, tags_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -9256,20 +9408,17 @@ public final class ClientProtos { } result = result && (hasDeleteType() == other.hasDeleteType()); if (hasDeleteType()) { - result = result && - (getDeleteType() == other.getDeleteType()); + result = result && deleteType_ == other.deleteType_; } result = result && (hasTags() == other.hasTags()); if (hasTags()) { result = result && getTags() .equals(other.getTags()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -9287,17 +9436,18 @@ public final class 
ClientProtos { } if (hasTimestamp()) { hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getTimestamp()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getTimestamp()); } if (hasDeleteType()) { hash = (37 * hash) + DELETE_TYPE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getDeleteType()); + hash = (53 * hash) + deleteType_; } if (hasTags()) { hash = (37 * hash) + TAGS_FIELD_NUMBER; hash = (53 * hash) + getTags().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -9325,46 +9475,57 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return 
PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -9372,14 +9533,15 @@ public final class ClientProtos { * Protobuf type {@code hbase.pb.MutationProto.ColumnValue.QualifierValue} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.MutationProto.ColumnValue.QualifierValue) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -9392,18 +9554,15 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + 
.alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); qualifier_ = com.google.protobuf.ByteString.EMPTY; @@ -9412,17 +9571,13 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00000002); timestamp_ = 0L; bitField0_ = (bitField0_ & ~0x00000004); - deleteType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION; + deleteType_ = 0; bitField0_ = (bitField0_ & ~0x00000008); tags_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000010); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_descriptor; @@ -9469,6 +9624,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue)other); @@ -9495,7 +9676,8 @@ public final class ClientProtos { if (other.hasTags()) { setTags(other.getTags()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -9512,7 +9694,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -9522,7 +9704,6 @@ public final class ClientProtos { } private int bitField0_; - // optional bytes qualifier = 1; private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes qualifier = 1; @@ -9558,7 +9739,6 @@ public final class ClientProtos { return this; } - // optional bytes value = 2; private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes value = 2; @@ -9594,7 +9774,6 @@ public final class ClientProtos { return this; } - // optional uint64 timestamp = 3; private long timestamp_ ; /** * optional uint64 timestamp = 3; @@ -9627,8 +9806,7 @@ public final class ClientProtos { return this; } - // optional .hbase.pb.MutationProto.DeleteType delete_type = 4; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType deleteType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION; + private int deleteType_ = 0; /** * optional .hbase.pb.MutationProto.DeleteType 
delete_type = 4; */ @@ -9639,7 +9817,8 @@ public final class ClientProtos { * optional .hbase.pb.MutationProto.DeleteType delete_type = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType getDeleteType() { - return deleteType_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType result = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType.valueOf(deleteType_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION : result; } /** * optional .hbase.pb.MutationProto.DeleteType delete_type = 4; @@ -9649,7 +9828,7 @@ public final class ClientProtos { throw new NullPointerException(); } bitField0_ |= 0x00000008; - deleteType_ = value; + deleteType_ = value.getNumber(); onChanged(); return this; } @@ -9658,12 +9837,11 @@ public final class ClientProtos { */ public Builder clearDeleteType() { bitField0_ = (bitField0_ & ~0x00000008); - deleteType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION; + deleteType_ = 0; onChanged(); return this; } - // optional bytes tags = 5; private com.google.protobuf.ByteString tags_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes tags = 5; @@ -9698,20 +9876,56 @@ public final class ClientProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.MutationProto.ColumnValue.QualifierValue) } + // @@protoc_insertion_point(class_scope:hbase.pb.MutationProto.ColumnValue.QualifierValue) + private static final 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue DEFAULT_INSTANCE; static { - defaultInstance = new QualifierValue(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public QualifierValue parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new QualifierValue(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.MutationProto.ColumnValue.QualifierValue) } private int bitField0_; - // required bytes family = 1; public static final int FAMILY_FIELD_NUMBER = 1; private com.google.protobuf.ByteString family_; /** @@ -9727,7 +9941,6 @@ public final class ClientProtos { return family_; } - // repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2; public static final int QUALIFIER_VALUE_FIELD_NUMBER = 2; private java.util.List qualifierValue_; /** @@ -9763,14 +9976,11 @@ public final class ClientProtos { return qualifierValue_.get(index); } - private void initFields() { - family_ = com.google.protobuf.ByteString.EMPTY; - qualifierValue_ = 
java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasFamily()) { memoizedIsInitialized = 0; @@ -9782,19 +9992,17 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, family_); } for (int i = 0; i < qualifierValue_.size(); i++) { output.writeMessage(2, qualifierValue_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -9806,19 +10014,13 @@ public final class ClientProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, qualifierValue_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -9836,12 +10038,10 @@ public final class ClientProtos { } result = result && getQualifierValueList() .equals(other.getQualifierValueList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -9857,7 
+10057,7 @@ public final class ClientProtos { hash = (37 * hash) + QUALIFIER_VALUE_FIELD_NUMBER; hash = (53 * hash) + getQualifierValueList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -9885,46 +10085,57 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return 
PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -9932,14 +10143,15 @@ public final class ClientProtos { * Protobuf type {@code hbase.pb.MutationProto.ColumnValue} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.MutationProto.ColumnValue) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -9952,19 +10164,16 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getQualifierValueFieldBuilder(); } } - private static Builder 
create() { - return new Builder(); - } - public Builder clear() { super.clear(); family_ = com.google.protobuf.ByteString.EMPTY; @@ -9978,10 +10187,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_descriptor; @@ -10021,6 +10226,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue)other); @@ -10054,20 +10285,20 @@ public final class ClientProtos { qualifierValue_ = other.qualifierValue_; bitField0_ = (bitField0_ & ~0x00000002); qualifierValueBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getQualifierValueFieldBuilder() : null; } else { qualifierValueBuilder_.addAllMessages(other.qualifierValue_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasFamily()) { - return false; } return true; @@ -10082,7 +10313,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -10092,7 +10323,6 @@ public final class ClientProtos { } private int bitField0_; - // required bytes family = 1; private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes family = 1; @@ -10128,7 +10358,6 @@ public final class ClientProtos { return this; } - // repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2; private java.util.List qualifierValue_ = java.util.Collections.emptyList(); private void ensureQualifierValueIsMutable() { @@ -10138,7 +10367,7 @@ public final class ClientProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder> qualifierValueBuilder_; /** @@ -10270,7 +10499,8 @@ public final class ClientProtos { java.lang.Iterable values) { if (qualifierValueBuilder_ == 
null) { ensureQualifierValueIsMutable(); - super.addAll(values, qualifierValue_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, qualifierValue_); onChanged(); } else { qualifierValueBuilder_.addAllMessages(values); @@ -10353,11 +10583,11 @@ public final class ClientProtos { getQualifierValueBuilderList() { return getQualifierValueFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder> getQualifierValueFieldBuilder() { if (qualifierValueBuilder_ == null) { - qualifierValueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + qualifierValueBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder>( qualifierValue_, ((bitField0_ & 0x00000002) == 0x00000002), @@ -10367,20 +10597,56 @@ public final class ClientProtos { } return qualifierValueBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.MutationProto.ColumnValue) } + // 
@@protoc_insertion_point(class_scope:hbase.pb.MutationProto.ColumnValue) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue DEFAULT_INSTANCE; static { - defaultInstance = new ColumnValue(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ColumnValue parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ColumnValue(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.MutationProto.ColumnValue) } private int bitField0_; - // optional bytes row = 1; public static final int ROW_FIELD_NUMBER = 1; private com.google.protobuf.ByteString row_; /** @@ -10396,9 +10662,8 @@ public final class ClientProtos { return row_; } - // optional .hbase.pb.MutationProto.MutationType mutate_type = 2; public static final int MUTATE_TYPE_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType mutateType_; + private int mutateType_; /** * optional .hbase.pb.MutationProto.MutationType mutate_type = 2; */ @@ -10409,10 +10674,10 @@ public final class ClientProtos { 
* optional .hbase.pb.MutationProto.MutationType mutate_type = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType getMutateType() { - return mutateType_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType result = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType.valueOf(mutateType_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND : result; } - // repeated .hbase.pb.MutationProto.ColumnValue column_value = 3; public static final int COLUMN_VALUE_FIELD_NUMBER = 3; private java.util.List columnValue_; /** @@ -10448,7 +10713,6 @@ public final class ClientProtos { return columnValue_.get(index); } - // optional uint64 timestamp = 4; public static final int TIMESTAMP_FIELD_NUMBER = 4; private long timestamp_; /** @@ -10464,7 +10728,6 @@ public final class ClientProtos { return timestamp_; } - // repeated .hbase.pb.NameBytesPair attribute = 5; public static final int ATTRIBUTE_FIELD_NUMBER = 5; private java.util.List attribute_; /** @@ -10500,9 +10763,8 @@ public final class ClientProtos { return attribute_.get(index); } - // optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT]; public static final int DURABILITY_FIELD_NUMBER = 6; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Durability durability_; + private int durability_; /** * optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT]; */ @@ -10513,52 +10775,49 @@ public final class ClientProtos { * optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT]; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Durability getDurability() { - return durability_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Durability result = 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Durability.valueOf(durability_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Durability.USE_DEFAULT : result; } - // optional .hbase.pb.TimeRange time_range = 7; public static final int TIME_RANGE_FIELD_NUMBER = 7; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange timeRange_; /** - * optional .hbase.pb.TimeRange time_range = 7; - * *
      * For some mutations, a result may be returned, in which case,
      * time range can be specified for potential performance gain
      * 
+ * + * optional .hbase.pb.TimeRange time_range = 7; */ public boolean hasTimeRange() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** - * optional .hbase.pb.TimeRange time_range = 7; - * *
      * For some mutations, a result may be returned, in which case,
      * time range can be specified for potential performance gain
      * 
+ * + * optional .hbase.pb.TimeRange time_range = 7; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { - return timeRange_; + return timeRange_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } /** - * optional .hbase.pb.TimeRange time_range = 7; - * *
      * For some mutations, a result may be returned, in which case,
      * time range can be specified for potential performance gain
      * 
+ * + * optional .hbase.pb.TimeRange time_range = 7; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { - return timeRange_; + return timeRange_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } - // optional int32 associated_cell_count = 8; public static final int ASSOCIATED_CELL_COUNT_FIELD_NUMBER = 8; private int associatedCellCount_; /** - * optional int32 associated_cell_count = 8; - * *
      * The below count is set when the associated cells are NOT
      * part of this protobuf message; they are passed alongside
@@ -10567,13 +10826,13 @@ public final class ClientProtos {
      * ours.  NOTE: This is different from the pb managed cell_count of the
      * 'cell' field above which is non-null when the cells are pb'd.
      * 
+ * + * optional int32 associated_cell_count = 8; */ public boolean hasAssociatedCellCount() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** - * optional int32 associated_cell_count = 8; - * *
      * The below count is set when the associated cells are NOT
      * part of this protobuf message; they are passed alongside
@@ -10582,12 +10841,13 @@ public final class ClientProtos {
      * ours.  NOTE: This is different from the pb managed cell_count of the
      * 'cell' field above which is non-null when the cells are pb'd.
      * 
+ * + * optional int32 associated_cell_count = 8; */ public int getAssociatedCellCount() { return associatedCellCount_; } - // optional uint64 nonce = 9; public static final int NONCE_FIELD_NUMBER = 9; private long nonce_; /** @@ -10603,21 +10863,11 @@ public final class ClientProtos { return nonce_; } - private void initFields() { - row_ = com.google.protobuf.ByteString.EMPTY; - mutateType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND; - columnValue_ = java.util.Collections.emptyList(); - timestamp_ = 0L; - attribute_ = java.util.Collections.emptyList(); - durability_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Durability.USE_DEFAULT; - timeRange_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - associatedCellCount_ = 0; - nonce_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; for (int i = 0; i < getColumnValueCount(); i++) { if (!getColumnValue(i).isInitialized()) { @@ -10637,12 +10887,11 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, row_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeEnum(2, mutateType_.getNumber()); + output.writeEnum(2, mutateType_); } for (int i = 0; i < columnValue_.size(); i++) { output.writeMessage(3, columnValue_.get(i)); @@ -10654,10 +10903,10 @@ public final class ClientProtos { output.writeMessage(5, attribute_.get(i)); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeEnum(6, durability_.getNumber()); + output.writeEnum(6, durability_); } if (((bitField0_ & 0x00000010) == 
0x00000010)) { - output.writeMessage(7, timeRange_); + output.writeMessage(7, getTimeRange()); } if (((bitField0_ & 0x00000020) == 0x00000020)) { output.writeInt32(8, associatedCellCount_); @@ -10665,12 +10914,11 @@ public final class ClientProtos { if (((bitField0_ & 0x00000040) == 0x00000040)) { output.writeUInt64(9, nonce_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -10680,7 +10928,7 @@ public final class ClientProtos { } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(2, mutateType_.getNumber()); + .computeEnumSize(2, mutateType_); } for (int i = 0; i < columnValue_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -10696,11 +10944,11 @@ public final class ClientProtos { } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(6, durability_.getNumber()); + .computeEnumSize(6, durability_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(7, timeRange_); + .computeMessageSize(7, getTimeRange()); } if (((bitField0_ & 0x00000020) == 0x00000020)) { size += com.google.protobuf.CodedOutputStream @@ -10710,19 +10958,13 @@ public final class ClientProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(9, nonce_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object 
obj) { if (obj == this) { return true; @@ -10740,8 +10982,7 @@ public final class ClientProtos { } result = result && (hasMutateType() == other.hasMutateType()); if (hasMutateType()) { - result = result && - (getMutateType() == other.getMutateType()); + result = result && mutateType_ == other.mutateType_; } result = result && getColumnValueList() .equals(other.getColumnValueList()); @@ -10754,8 +10995,7 @@ public final class ClientProtos { .equals(other.getAttributeList()); result = result && (hasDurability() == other.hasDurability()); if (hasDurability()) { - result = result && - (getDurability() == other.getDurability()); + result = result && durability_ == other.durability_; } result = result && (hasTimeRange() == other.hasTimeRange()); if (hasTimeRange()) { @@ -10772,12 +11012,10 @@ public final class ClientProtos { result = result && (getNonce() == other.getNonce()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -10791,7 +11029,7 @@ public final class ClientProtos { } if (hasMutateType()) { hash = (37 * hash) + MUTATE_TYPE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getMutateType()); + hash = (53 * hash) + mutateType_; } if (getColumnValueCount() > 0) { hash = (37 * hash) + COLUMN_VALUE_FIELD_NUMBER; @@ -10799,7 +11037,8 @@ public final class ClientProtos { } if (hasTimestamp()) { hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getTimestamp()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getTimestamp()); } if (getAttributeCount() > 0) { hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER; @@ -10807,7 +11046,7 @@ public final class ClientProtos { } if (hasDurability()) { hash = (37 * hash) + DURABILITY_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getDurability()); + hash = (53 * hash) + 
durability_; } if (hasTimeRange()) { hash = (37 * hash) + TIME_RANGE_FIELD_NUMBER; @@ -10819,9 +11058,10 @@ public final class ClientProtos { } if (hasNonce()) { hash = (37 * hash) + NONCE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonce()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonce()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -10849,52 +11089,61 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.MutationProto} - * *
      **
      * A specific mutation inside a mutate request.
@@ -10903,16 +11152,19 @@ public final class ClientProtos {
      * only metadata present because data is being carried
      * elsewhere outside of pb.
      * 
+ * + * Protobuf type {@code hbase.pb.MutationProto} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProtoOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.MutationProto) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProtoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -10925,26 +11177,23 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getColumnValueFieldBuilder(); getAttributeFieldBuilder(); getTimeRangeFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); row_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); - mutateType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND; + mutateType_ = 0; bitField0_ = (bitField0_ & ~0x00000002); if (columnValueBuilder_ == null) { columnValue_ = java.util.Collections.emptyList(); 
@@ -10960,10 +11209,10 @@ public final class ClientProtos { } else { attributeBuilder_.clear(); } - durability_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Durability.USE_DEFAULT; + durability_ = 0; bitField0_ = (bitField0_ & ~0x00000020); if (timeRangeBuilder_ == null) { - timeRange_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + timeRange_ = null; } else { timeRangeBuilder_.clear(); } @@ -10975,10 +11224,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_descriptor; @@ -11055,6 +11300,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto) { return 
mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto)other); @@ -11091,7 +11362,7 @@ public final class ClientProtos { columnValue_ = other.columnValue_; bitField0_ = (bitField0_ & ~0x00000004); columnValueBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getColumnValueFieldBuilder() : null; } else { columnValueBuilder_.addAllMessages(other.columnValue_); @@ -11120,7 +11391,7 @@ public final class ClientProtos { attribute_ = other.attribute_; bitField0_ = (bitField0_ & ~0x00000010); attributeBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getAttributeFieldBuilder() : null; } else { attributeBuilder_.addAllMessages(other.attribute_); @@ -11139,20 +11410,19 @@ public final class ClientProtos { if (other.hasNonce()) { setNonce(other.getNonce()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getColumnValueCount(); i++) { if (!getColumnValue(i).isInitialized()) { - return false; } } for (int i = 0; i < getAttributeCount(); i++) { if (!getAttribute(i).isInitialized()) { - return false; } } @@ -11168,7 +11438,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -11178,7 +11448,6 @@ public final class ClientProtos { } private int bitField0_; - // optional bytes row = 1; private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; /** * optional 
bytes row = 1; @@ -11214,8 +11483,7 @@ public final class ClientProtos { return this; } - // optional .hbase.pb.MutationProto.MutationType mutate_type = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType mutateType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND; + private int mutateType_ = 0; /** * optional .hbase.pb.MutationProto.MutationType mutate_type = 2; */ @@ -11226,7 +11494,8 @@ public final class ClientProtos { * optional .hbase.pb.MutationProto.MutationType mutate_type = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType getMutateType() { - return mutateType_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType result = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType.valueOf(mutateType_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND : result; } /** * optional .hbase.pb.MutationProto.MutationType mutate_type = 2; @@ -11236,7 +11505,7 @@ public final class ClientProtos { throw new NullPointerException(); } bitField0_ |= 0x00000002; - mutateType_ = value; + mutateType_ = value.getNumber(); onChanged(); return this; } @@ -11245,12 +11514,11 @@ public final class ClientProtos { */ public Builder clearMutateType() { bitField0_ = (bitField0_ & ~0x00000002); - mutateType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND; + mutateType_ = 0; onChanged(); return this; } - // repeated .hbase.pb.MutationProto.ColumnValue column_value = 3; private java.util.List columnValue_ = java.util.Collections.emptyList(); private void ensureColumnValueIsMutable() { @@ -11260,7 +11528,7 @@ public final class ClientProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private 
com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder> columnValueBuilder_; /** @@ -11392,7 +11660,8 @@ public final class ClientProtos { java.lang.Iterable values) { if (columnValueBuilder_ == null) { ensureColumnValueIsMutable(); - super.addAll(values, columnValue_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, columnValue_); onChanged(); } else { columnValueBuilder_.addAllMessages(values); @@ -11475,11 +11744,11 @@ public final class ClientProtos { getColumnValueBuilderList() { return getColumnValueFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder> getColumnValueFieldBuilder() { if (columnValueBuilder_ == null) { - columnValueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + columnValueBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder>( columnValue_, ((bitField0_ & 0x00000004) == 0x00000004), @@ -11490,7 +11759,6 @@ public final class ClientProtos { return columnValueBuilder_; } - // optional uint64 timestamp = 4; private long timestamp_ ; /** * optional uint64 timestamp = 4; @@ -11523,7 +11791,6 @@ 
public final class ClientProtos { return this; } - // repeated .hbase.pb.NameBytesPair attribute = 5; private java.util.List attribute_ = java.util.Collections.emptyList(); private void ensureAttributeIsMutable() { @@ -11533,7 +11800,7 @@ public final class ClientProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_; /** @@ -11665,7 +11932,8 @@ public final class ClientProtos { java.lang.Iterable values) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); - super.addAll(values, attribute_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, attribute_); onChanged(); } else { attributeBuilder_.addAllMessages(values); @@ -11748,11 +12016,11 @@ public final class ClientProtos { getAttributeBuilderList() { return getAttributeFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getAttributeFieldBuilder() { if (attributeBuilder_ == null) { - attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( attribute_, ((bitField0_ & 0x00000010) == 0x00000010), @@ -11763,8 
+12031,7 @@ public final class ClientProtos { return attributeBuilder_; } - // optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT]; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Durability durability_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Durability.USE_DEFAULT; + private int durability_ = 0; /** * optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT]; */ @@ -11775,7 +12042,8 @@ public final class ClientProtos { * optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT]; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Durability getDurability() { - return durability_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Durability result = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Durability.valueOf(durability_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Durability.USE_DEFAULT : result; } /** * optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT]; @@ -11785,7 +12053,7 @@ public final class ClientProtos { throw new NullPointerException(); } bitField0_ |= 0x00000020; - durability_ = value; + durability_ = value.getNumber(); onChanged(); return this; } @@ -11794,48 +12062,47 @@ public final class ClientProtos { */ public Builder clearDurability() { bitField0_ = (bitField0_ & ~0x00000020); - durability_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Durability.USE_DEFAULT; + durability_ = 0; onChanged(); return this; } - // optional .hbase.pb.TimeRange time_range = 7; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange timeRange_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; /** - * optional .hbase.pb.TimeRange time_range = 7; - * *
        * For some mutations, a result may be returned, in which case,
        * time range can be specified for potential performance gain
        * 
+ * + * optional .hbase.pb.TimeRange time_range = 7; */ public boolean hasTimeRange() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** - * optional .hbase.pb.TimeRange time_range = 7; - * *
        * For some mutations, a result may be returned, in which case,
        * time range can be specified for potential performance gain
        * 
+ * + * optional .hbase.pb.TimeRange time_range = 7; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { if (timeRangeBuilder_ == null) { - return timeRange_; + return timeRange_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } else { return timeRangeBuilder_.getMessage(); } } /** - * optional .hbase.pb.TimeRange time_range = 7; - * *
        * For some mutations, a result may be returned, in which case,
        * time range can be specified for potential performance gain
        * 
+ * + * optional .hbase.pb.TimeRange time_range = 7; */ public Builder setTimeRange(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange value) { if (timeRangeBuilder_ == null) { @@ -11851,12 +12118,12 @@ public final class ClientProtos { return this; } /** - * optional .hbase.pb.TimeRange time_range = 7; - * *
        * For some mutations, a result may be returned, in which case,
        * time range can be specified for potential performance gain
        * 
+ * + * optional .hbase.pb.TimeRange time_range = 7; */ public Builder setTimeRange( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) { @@ -11870,16 +12137,17 @@ public final class ClientProtos { return this; } /** - * optional .hbase.pb.TimeRange time_range = 7; - * *
        * For some mutations, a result may be returned, in which case,
        * time range can be specified for potential performance gain
        * 
+ * + * optional .hbase.pb.TimeRange time_range = 7; */ public Builder mergeTimeRange(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange value) { if (timeRangeBuilder_ == null) { if (((bitField0_ & 0x00000040) == 0x00000040) && + timeRange_ != null && timeRange_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) { timeRange_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial(); @@ -11894,16 +12162,16 @@ public final class ClientProtos { return this; } /** - * optional .hbase.pb.TimeRange time_range = 7; - * *
        * For some mutations, a result may be returned, in which case,
        * time range can be specified for potential performance gain
        * 
+ * + * optional .hbase.pb.TimeRange time_range = 7; */ public Builder clearTimeRange() { if (timeRangeBuilder_ == null) { - timeRange_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + timeRange_ = null; onChanged(); } else { timeRangeBuilder_.clear(); @@ -11912,12 +12180,12 @@ public final class ClientProtos { return this; } /** - * optional .hbase.pb.TimeRange time_range = 7; - * *
        * For some mutations, a result may be returned, in which case,
        * time range can be specified for potential performance gain
        * 
+ * + * optional .hbase.pb.TimeRange time_range = 7; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() { bitField0_ |= 0x00000040; @@ -11925,35 +12193,36 @@ public final class ClientProtos { return getTimeRangeFieldBuilder().getBuilder(); } /** - * optional .hbase.pb.TimeRange time_range = 7; - * *
        * For some mutations, a result may be returned, in which case,
        * time range can be specified for potential performance gain
        * 
+ * + * optional .hbase.pb.TimeRange time_range = 7; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { if (timeRangeBuilder_ != null) { return timeRangeBuilder_.getMessageOrBuilder(); } else { - return timeRange_; + return timeRange_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } } /** - * optional .hbase.pb.TimeRange time_range = 7; - * *
        * For some mutations, a result may be returned, in which case,
        * time range can be specified for potential performance gain
        * 
+ * + * optional .hbase.pb.TimeRange time_range = 7; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> getTimeRangeFieldBuilder() { if (timeRangeBuilder_ == null) { - timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder< + timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>( - timeRange_, + getTimeRange(), getParentForChildren(), isClean()); timeRange_ = null; @@ -11961,11 +12230,8 @@ public final class ClientProtos { return timeRangeBuilder_; } - // optional int32 associated_cell_count = 8; private int associatedCellCount_ ; /** - * optional int32 associated_cell_count = 8; - * *
        * The below count is set when the associated cells are NOT
        * part of this protobuf message; they are passed alongside
@@ -11974,13 +12240,13 @@ public final class ClientProtos {
        * ours.  NOTE: This is different from the pb managed cell_count of the
        * 'cell' field above which is non-null when the cells are pb'd.
        * 
+ * + * optional int32 associated_cell_count = 8; */ public boolean hasAssociatedCellCount() { return ((bitField0_ & 0x00000080) == 0x00000080); } /** - * optional int32 associated_cell_count = 8; - * *
        * The below count is set when the associated cells are NOT
        * part of this protobuf message; they are passed alongside
@@ -11989,13 +12255,13 @@ public final class ClientProtos {
        * ours.  NOTE: This is different from the pb managed cell_count of the
        * 'cell' field above which is non-null when the cells are pb'd.
        * 
+ * + * optional int32 associated_cell_count = 8; */ public int getAssociatedCellCount() { return associatedCellCount_; } /** - * optional int32 associated_cell_count = 8; - * *
        * The below count is set when the associated cells are NOT
        * part of this protobuf message; they are passed alongside
@@ -12004,6 +12270,8 @@ public final class ClientProtos {
        * ours.  NOTE: This is different from the pb managed cell_count of the
        * 'cell' field above which is non-null when the cells are pb'd.
        * 
+ * + * optional int32 associated_cell_count = 8; */ public Builder setAssociatedCellCount(int value) { bitField0_ |= 0x00000080; @@ -12012,8 +12280,6 @@ public final class ClientProtos { return this; } /** - * optional int32 associated_cell_count = 8; - * *
        * The below count is set when the associated cells are NOT
        * part of this protobuf message; they are passed alongside
@@ -12022,6 +12288,8 @@ public final class ClientProtos {
        * ours.  NOTE: This is different from the pb managed cell_count of the
        * 'cell' field above which is non-null when the cells are pb'd.
        * 
+ * + * optional int32 associated_cell_count = 8; */ public Builder clearAssociatedCellCount() { bitField0_ = (bitField0_ & ~0x00000080); @@ -12030,7 +12298,6 @@ public final class ClientProtos { return this; } - // optional uint64 nonce = 9; private long nonce_ ; /** * optional uint64 nonce = 9; @@ -12062,22 +12329,59 @@ public final class ClientProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.MutationProto) } + // @@protoc_insertion_point(class_scope:hbase.pb.MutationProto) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto DEFAULT_INSTANCE; static { - defaultInstance = new MutationProto(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public MutationProto parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MutationProto(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto getDefaultInstanceForType() { + 
return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.MutationProto) } - public interface MutateRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface MutateRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.MutateRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionSpecifier region = 1; /** * required .hbase.pb.RegionSpecifier region = 1; */ @@ -12091,7 +12395,6 @@ public final class ClientProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - // required .hbase.pb.MutationProto mutation = 2; /** * required .hbase.pb.MutationProto mutation = 2; */ @@ -12105,7 +12408,6 @@ public final class ClientProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder(); - // optional .hbase.pb.Condition condition = 3; /** * optional .hbase.pb.Condition condition = 3; */ @@ -12119,7 +12421,6 @@ public final class ClientProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder(); - // optional uint64 nonce_group = 4; /** * optional uint64 nonce_group = 4; */ @@ -12130,49 +12431,40 @@ public final class ClientProtos { long getNonceGroup(); } /** - * Protobuf type {@code hbase.pb.MutateRequest} - * *
    **
    * The mutate request. Perform a single Mutate operation.
-   *
    * Optionally, you can specify a condition. The mutate
    * will take place only if the condition is met.  Otherwise,
    * the mutate will be ignored.  In the response result,
    * parameter processed is used to indicate if the mutate
    * actually happened.
    * 
+ * + * Protobuf type {@code hbase.pb.MutateRequest} */ - public static final class MutateRequest extends - com.google.protobuf.GeneratedMessage - implements MutateRequestOrBuilder { + public static final class MutateRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.MutateRequest) + MutateRequestOrBuilder { // Use MutateRequest.newBuilder() to construct. - private MutateRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private MutateRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private MutateRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final MutateRequest defaultInstance; - public static MutateRequest getDefaultInstance() { - return defaultInstance; } - - public MutateRequest getDefaultInstanceForType() { - return defaultInstance; + private MutateRequest() { + nonceGroup_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private MutateRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -12241,7 +12533,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -12252,30 +12544,14 @@ 
public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public MutateRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new MutateRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** @@ -12288,16 +12564,15 @@ public final class ClientProtos { * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } /** * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } - // required .hbase.pb.MutationProto mutation = 2; public static final int MUTATION_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto mutation_; /** @@ -12310,16 +12585,15 @@ public final class ClientProtos { * required .hbase.pb.MutationProto mutation = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto getMutation() { - return mutation_; + return mutation_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance() : mutation_; } /** * required .hbase.pb.MutationProto mutation = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() { - return mutation_; + return mutation_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance() : mutation_; } - // optional .hbase.pb.Condition condition = 3; public static final int CONDITION_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition condition_; /** @@ -12332,16 +12606,15 @@ public final class ClientProtos { * optional .hbase.pb.Condition condition = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition getCondition() { - return condition_; + return condition_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance() : condition_; } /** * optional .hbase.pb.Condition condition = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() { - return condition_; + return condition_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance() : condition_; } - // optional uint64 nonce_group = 4; public static final int NONCE_GROUP_FIELD_NUMBER = 4; private long nonceGroup_; /** @@ -12357,16 +12630,11 @@ public final class ClientProtos { return nonceGroup_; } - private void initFields() { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - mutation_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance(); - condition_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); - nonceGroup_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRegion()) { memoizedIsInitialized = 0; @@ -12396,57 +12664,49 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); + output.writeMessage(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, mutation_); + output.writeMessage(2, getMutation()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeMessage(3, condition_); + output.writeMessage(3, getCondition()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeUInt64(4, nonceGroup_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += 
com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); + .computeMessageSize(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, mutation_); + .computeMessageSize(2, getMutation()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, condition_); + .computeMessageSize(3, getCondition()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(4, nonceGroup_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -12477,12 +12737,10 @@ public final class ClientProtos { result = result && (getNonceGroup() == other.getNonceGroup()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -12504,9 +12762,10 @@ public final class ClientProtos { } if (hasNonceGroup()) { hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonceGroup()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonceGroup()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -12534,72 +12793,83 @@ public final class ClientProtos { } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 
+ .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.MutateRequest} - * *
      **
      * The mutate request. Perform a single Mutate operation.
-     *
      * Optionally, you can specify a condition. The mutate
      * will take place only if the condition is met.  Otherwise,
      * the mutate will be ignored.  In the response result,
      * parameter processed is used to indicate if the mutate
      * actually happened.
      * 
+ * + * Protobuf type {@code hbase.pb.MutateRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.MutateRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -12612,37 +12882,34 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionFieldBuilder(); getMutationFieldBuilder(); getConditionFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (mutationBuilder_ == null) { - mutation_ = 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance(); + mutation_ = null; } else { mutationBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); if (conditionBuilder_ == null) { - condition_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); + condition_ = null; } else { conditionBuilder_.clear(); } @@ -12652,10 +12919,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateRequest_descriptor; @@ -12710,6 +12973,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest)other); @@ -12733,30 +13022,26 @@ public final class ClientProtos 
{ if (other.hasNonceGroup()) { setNonceGroup(other.getNonceGroup()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasRegion()) { - return false; } if (!hasMutation()) { - return false; } if (!getRegion().isInitialized()) { - return false; } if (!getMutation().isInitialized()) { - return false; } if (hasCondition()) { if (!getCondition().isInitialized()) { - return false; } } @@ -12772,7 +13057,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -12782,9 +13067,8 @@ public final class ClientProtos { } private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * required .hbase.pb.RegionSpecifier region = 1; @@ -12797,7 +13081,7 @@ public final class ClientProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { - return 
region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } @@ -12838,6 +13122,7 @@ public final class ClientProtos { public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != null && region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); @@ -12856,7 +13141,7 @@ public final class ClientProtos { */ public Builder clearRegion() { if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; onChanged(); } else { regionBuilder_.clear(); @@ -12879,19 +13164,20 @@ public final class ClientProtos { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { - return region_; + return region_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } } /** * required .hbase.pb.RegionSpecifier region = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, + getRegion(), getParentForChildren(), isClean()); region_ = null; @@ -12899,9 +13185,8 @@ public final class ClientProtos { return regionBuilder_; } - // required .hbase.pb.MutationProto mutation = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto mutation_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto mutation_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProtoOrBuilder> mutationBuilder_; /** * required .hbase.pb.MutationProto mutation = 2; @@ -12914,7 +13199,7 @@ public final class ClientProtos { */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto getMutation() { if (mutationBuilder_ == null) { - return mutation_; + return mutation_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance() : mutation_; } else { return mutationBuilder_.getMessage(); } @@ -12955,6 +13240,7 @@ public final class ClientProtos { public Builder mergeMutation(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto value) { if (mutationBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + mutation_ != null && mutation_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance()) { mutation_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.newBuilder(mutation_).mergeFrom(value).buildPartial(); @@ -12973,7 +13259,7 @@ public final class ClientProtos { */ public Builder clearMutation() { if (mutationBuilder_ == null) { - mutation_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance(); + mutation_ = null; onChanged(); } else { mutationBuilder_.clear(); @@ -12996,19 +13282,20 @@ public final class ClientProtos { if (mutationBuilder_ != null) { return mutationBuilder_.getMessageOrBuilder(); } else { - return mutation_; + return mutation_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance() : mutation_; } } /** * required .hbase.pb.MutationProto mutation = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProtoOrBuilder> getMutationFieldBuilder() { if (mutationBuilder_ == null) { - mutationBuilder_ = new com.google.protobuf.SingleFieldBuilder< + mutationBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProtoOrBuilder>( - mutation_, + getMutation(), getParentForChildren(), isClean()); mutation_ = null; @@ -13016,9 +13303,8 @@ public final class ClientProtos { return mutationBuilder_; } - // optional .hbase.pb.Condition condition = 3; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition condition_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition condition_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ConditionOrBuilder> conditionBuilder_; /** * optional .hbase.pb.Condition condition = 3; @@ -13031,7 +13317,7 @@ public final class ClientProtos { */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition getCondition() { if (conditionBuilder_ == null) { - return condition_; + return condition_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance() : condition_; } else { return conditionBuilder_.getMessage(); } @@ -13072,6 +13358,7 @@ public final class ClientProtos { public Builder mergeCondition(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition value) { if (conditionBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004) && + condition_ != null && condition_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance()) { condition_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.newBuilder(condition_).mergeFrom(value).buildPartial(); @@ -13090,7 +13377,7 @@ public final class ClientProtos { */ public Builder clearCondition() { if (conditionBuilder_ == null) { - condition_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); + condition_ = null; onChanged(); } else { conditionBuilder_.clear(); @@ -13113,19 +13400,20 @@ public final class ClientProtos { if (conditionBuilder_ != null) { return conditionBuilder_.getMessageOrBuilder(); } else { - return condition_; + return condition_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance() : condition_; } } /** * optional .hbase.pb.Condition condition = 3; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ConditionOrBuilder> getConditionFieldBuilder() { if (conditionBuilder_ == null) { - conditionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + conditionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ConditionOrBuilder>( - condition_, + getCondition(), getParentForChildren(), isClean()); condition_ = null; @@ -13133,7 +13421,6 @@ public final class ClientProtos { return conditionBuilder_; } - // optional uint64 nonce_group = 4; private long nonceGroup_ ; /** * optional uint64 nonce_group = 4; @@ -13165,22 +13452,59 @@ public final class ClientProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.MutateRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.MutateRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest DEFAULT_INSTANCE; static { - defaultInstance = new MutateRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public MutateRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MutateRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.MutateRequest) } - public interface MutateResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface MutateResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.MutateResponse) + com.google.protobuf.MessageOrBuilder { - // optional .hbase.pb.Result result = 1; /** * optional .hbase.pb.Result result = 1; */ @@ -13194,57 +13518,48 @@ public final class ClientProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder(); - // optional bool processed = 2; /** - * optional bool processed = 2; - * *
      * used for mutate to indicate processed only
      * 
+ * + * optional bool processed = 2; */ boolean hasProcessed(); /** - * optional bool processed = 2; - * *
      * used for mutate to indicate processed only
      * 
+ * + * optional bool processed = 2; */ boolean getProcessed(); } /** * Protobuf type {@code hbase.pb.MutateResponse} */ - public static final class MutateResponse extends - com.google.protobuf.GeneratedMessage - implements MutateResponseOrBuilder { + public static final class MutateResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.MutateResponse) + MutateResponseOrBuilder { // Use MutateResponse.newBuilder() to construct. - private MutateResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private MutateResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private MutateResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final MutateResponse defaultInstance; - public static MutateResponse getDefaultInstance() { - return defaultInstance; } - - public MutateResponse getDefaultInstanceForType() { - return defaultInstance; + private MutateResponse() { + processed_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private MutateResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -13287,7 +13602,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { 
this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -13298,30 +13613,14 @@ public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public MutateResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new MutateResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional .hbase.pb.Result result = 1; public static final int RESULT_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result result_; /** @@ -13334,47 +13633,43 @@ public final class ClientProtos { * optional .hbase.pb.Result result = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result getResult() { - return result_; + return result_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance() : result_; } /** * optional .hbase.pb.Result result = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { - return result_; + return result_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance() : result_; } - // optional bool processed = 2; public static final int PROCESSED_FIELD_NUMBER = 2; private boolean processed_; /** - * optional bool processed = 2; - * *
      * used for mutate to indicate processed only
      * 
+ * + * optional bool processed = 2; */ public boolean hasProcessed() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional bool processed = 2; - * *
      * used for mutate to indicate processed only
      * 
+ * + * optional bool processed = 2; */ public boolean getProcessed() { return processed_; } - private void initFields() { - result_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance(); - processed_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -13382,43 +13677,35 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, result_); + output.writeMessage(1, getResult()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBool(2, processed_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, result_); + .computeMessageSize(1, getResult()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(2, processed_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -13439,12 +13726,10 @@ public final class 
ClientProtos { result = result && (getProcessed() == other.getProcessed()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -13458,9 +13743,10 @@ public final class ClientProtos { } if (hasProcessed()) { hash = (37 * hash) + PROCESSED_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getProcessed()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getProcessed()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -13488,46 +13774,57 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -13535,14 +13832,15 @@ public final class ClientProtos { * Protobuf type {@code hbase.pb.MutateResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.MutateResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -13555,23 +13853,20 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getResultFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { 
super.clear(); if (resultBuilder_ == null) { - result_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance(); + result_ = null; } else { resultBuilder_.clear(); } @@ -13581,10 +13876,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateResponse_descriptor; @@ -13623,6 +13914,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse)other); @@ -13640,7 +13957,8 @@ public final class ClientProtos { if (other.hasProcessed()) { setProcessed(other.getProcessed()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; 
} @@ -13657,7 +13975,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -13667,9 +13985,8 @@ public final class ClientProtos { } private int bitField0_; - // optional .hbase.pb.Result result = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result result_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result result_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_; /** * optional .hbase.pb.Result result = 1; @@ -13682,7 +13999,7 @@ public final class ClientProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result getResult() { if (resultBuilder_ == null) { - return result_; + return result_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance() : result_; } else { return resultBuilder_.getMessage(); } @@ -13723,6 +14040,7 @@ public final class ClientProtos { public Builder mergeResult(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result value) { if (resultBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + result_ != null && result_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance()) { result_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial(); @@ -13741,7 +14059,7 @@ public final class ClientProtos { */ public Builder clearResult() { if (resultBuilder_ == null) { - result_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance(); + result_ = null; onChanged(); } else { resultBuilder_.clear(); @@ -13764,19 +14082,20 @@ public final class ClientProtos { if (resultBuilder_ != null) { return resultBuilder_.getMessageOrBuilder(); } else { - return result_; + return result_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance() : result_; } } /** * optional .hbase.pb.Result result = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder> getResultFieldBuilder() { if (resultBuilder_ == null) { - resultBuilder_ = new com.google.protobuf.SingleFieldBuilder< + resultBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder>( - result_, + getResult(), getParentForChildren(), isClean()); result_ = null; @@ -13784,34 +14103,33 @@ public final class ClientProtos { return resultBuilder_; } - // optional bool processed = 2; private boolean processed_ ; /** - * optional bool processed = 2; - * *
        * used for mutate to indicate processed only
        * 
+ * + * optional bool processed = 2; */ public boolean hasProcessed() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional bool processed = 2; - * *
        * used for mutate to indicate processed only
        * 
+ * + * optional bool processed = 2; */ public boolean getProcessed() { return processed_; } /** - * optional bool processed = 2; - * *
        * used for mutate to indicate processed only
        * 
+ * + * optional bool processed = 2; */ public Builder setProcessed(boolean value) { bitField0_ |= 0x00000002; @@ -13820,11 +14138,11 @@ public final class ClientProtos { return this; } /** - * optional bool processed = 2; - * *
        * used for mutate to indicate processed only
        * 
+ * + * optional bool processed = 2; */ public Builder clearProcessed() { bitField0_ = (bitField0_ & ~0x00000002); @@ -13832,22 +14150,59 @@ public final class ClientProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.MutateResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.MutateResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse DEFAULT_INSTANCE; static { - defaultInstance = new MutateResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public MutateResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MutateResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.MutateResponse) } - public interface ScanOrBuilder - extends 
com.google.protobuf.MessageOrBuilder { + public interface ScanOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.Scan) + com.google.protobuf.MessageOrBuilder { - // repeated .hbase.pb.Column column = 1; /** * repeated .hbase.pb.Column column = 1; */ @@ -13872,7 +14227,6 @@ public final class ClientProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( int index); - // repeated .hbase.pb.NameBytesPair attribute = 2; /** * repeated .hbase.pb.NameBytesPair attribute = 2; */ @@ -13897,7 +14251,6 @@ public final class ClientProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( int index); - // optional bytes start_row = 3; /** * optional bytes start_row = 3; */ @@ -13907,7 +14260,6 @@ public final class ClientProtos { */ com.google.protobuf.ByteString getStartRow(); - // optional bytes stop_row = 4; /** * optional bytes stop_row = 4; */ @@ -13917,7 +14269,6 @@ public final class ClientProtos { */ com.google.protobuf.ByteString getStopRow(); - // optional .hbase.pb.Filter filter = 5; /** * optional .hbase.pb.Filter filter = 5; */ @@ -13931,7 +14282,6 @@ public final class ClientProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder(); - // optional .hbase.pb.TimeRange time_range = 6; /** * optional .hbase.pb.TimeRange time_range = 6; */ @@ -13945,7 +14295,6 @@ public final class ClientProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder(); - // optional uint32 max_versions = 7 [default = 1]; /** * optional uint32 max_versions = 7 [default = 1]; */ @@ -13955,7 +14304,6 @@ public final class ClientProtos { */ int getMaxVersions(); - // optional bool cache_blocks = 8 [default = true]; /** * optional bool cache_blocks = 8 [default = true]; */ @@ -13965,7 +14313,6 @@ public final class ClientProtos { */ boolean 
getCacheBlocks(); - // optional uint32 batch_size = 9; /** * optional uint32 batch_size = 9; */ @@ -13975,7 +14322,6 @@ public final class ClientProtos { */ int getBatchSize(); - // optional uint64 max_result_size = 10; /** * optional uint64 max_result_size = 10; */ @@ -13985,7 +14331,6 @@ public final class ClientProtos { */ long getMaxResultSize(); - // optional uint32 store_limit = 11; /** * optional uint32 store_limit = 11; */ @@ -13995,7 +14340,6 @@ public final class ClientProtos { */ int getStoreLimit(); - // optional uint32 store_offset = 12; /** * optional uint32 store_offset = 12; */ @@ -14005,25 +14349,23 @@ public final class ClientProtos { */ int getStoreOffset(); - // optional bool load_column_families_on_demand = 13; /** - * optional bool load_column_families_on_demand = 13; - * *
      * DO NOT add defaults to load_column_families_on_demand. 
      * 
+ * + * optional bool load_column_families_on_demand = 13; */ boolean hasLoadColumnFamiliesOnDemand(); /** - * optional bool load_column_families_on_demand = 13; - * *
      * DO NOT add defaults to load_column_families_on_demand. 
      * 
+ * + * optional bool load_column_families_on_demand = 13; */ boolean getLoadColumnFamiliesOnDemand(); - // optional bool small = 14; /** * optional bool small = 14; */ @@ -14033,7 +14375,6 @@ public final class ClientProtos { */ boolean getSmall(); - // optional bool reversed = 15 [default = false]; /** * optional bool reversed = 15 [default = false]; */ @@ -14043,7 +14384,6 @@ public final class ClientProtos { */ boolean getReversed(); - // optional .hbase.pb.Consistency consistency = 16 [default = STRONG]; /** * optional .hbase.pb.Consistency consistency = 16 [default = STRONG]; */ @@ -14053,7 +14393,6 @@ public final class ClientProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency getConsistency(); - // optional uint32 caching = 17; /** * optional uint32 caching = 17; */ @@ -14063,7 +14402,6 @@ public final class ClientProtos { */ int getCaching(); - // optional bool allow_partial_results = 18; /** * optional bool allow_partial_results = 18; */ @@ -14073,7 +14411,6 @@ public final class ClientProtos { */ boolean getAllowPartialResults(); - // repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19; /** * repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19; */ @@ -14099,49 +14436,56 @@ public final class ClientProtos { int index); } /** - * Protobuf type {@code hbase.pb.Scan} - * *
    **
    * Instead of get from a table, you can scan it with optional filters.
    * You can specify the row key range, time range, the columns/families
    * to scan and so on.
-   *
    * This scan is used the first time in a scan request. The response of
    * the initial scan will return a scanner id, which should be used to
    * fetch result batches later on before it is closed.
    * 
+ * + * Protobuf type {@code hbase.pb.Scan} */ - public static final class Scan extends - com.google.protobuf.GeneratedMessage - implements ScanOrBuilder { + public static final class Scan extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.Scan) + ScanOrBuilder { // Use Scan.newBuilder() to construct. - private Scan(com.google.protobuf.GeneratedMessage.Builder builder) { + private Scan(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private Scan(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final Scan defaultInstance; - public static Scan getDefaultInstance() { - return defaultInstance; - } - - public Scan getDefaultInstanceForType() { - return defaultInstance; + private Scan() { + column_ = java.util.Collections.emptyList(); + attribute_ = java.util.Collections.emptyList(); + startRow_ = com.google.protobuf.ByteString.EMPTY; + stopRow_ = com.google.protobuf.ByteString.EMPTY; + maxVersions_ = 1; + cacheBlocks_ = true; + batchSize_ = 0; + maxResultSize_ = 0L; + storeLimit_ = 0; + storeOffset_ = 0; + loadColumnFamiliesOnDemand_ = false; + small_ = false; + reversed_ = false; + consistency_ = 0; + caching_ = 0; + allowPartialResults_ = false; + cfTimeRange_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private Scan( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -14165,7 
+14509,8 @@ public final class ClientProtos { column_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } - column_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.PARSER, extensionRegistry)); + column_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.PARSER, extensionRegistry)); break; } case 18: { @@ -14173,7 +14518,8 @@ public final class ClientProtos { attribute_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } - attribute_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry)); + attribute_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry)); break; } case 26: { @@ -14264,7 +14610,7 @@ public final class ClientProtos { unknownFields.mergeVarintField(16, rawValue); } else { bitField0_ |= 0x00002000; - consistency_ = value; + consistency_ = rawValue; } break; } @@ -14283,7 +14629,8 @@ public final class ClientProtos { cfTimeRange_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00040000; } - cfTimeRange_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.PARSER, extensionRegistry)); + cfTimeRange_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.PARSER, extensionRegistry)); break; } } @@ -14292,7 +14639,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { column_ = java.util.Collections.unmodifiableList(column_); @@ -14312,30 +14659,14 @@ public final class ClientProtos { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Scan_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Scan_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public Scan parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new Scan(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // repeated .hbase.pb.Column column = 1; public static final int COLUMN_FIELD_NUMBER = 1; private java.util.List column_; /** @@ -14371,7 +14702,6 @@ public final class ClientProtos { return column_.get(index); } - // repeated .hbase.pb.NameBytesPair attribute = 2; public static final int ATTRIBUTE_FIELD_NUMBER = 2; private java.util.List attribute_; /** @@ -14407,7 +14737,6 @@ public final class ClientProtos { return attribute_.get(index); } - // optional bytes start_row = 3; public static final int START_ROW_FIELD_NUMBER = 3; private com.google.protobuf.ByteString startRow_; /** @@ -14423,7 +14752,6 @@ public final class ClientProtos { return startRow_; } - // optional bytes stop_row = 4; public static final int STOP_ROW_FIELD_NUMBER = 4; private com.google.protobuf.ByteString stopRow_; /** @@ -14439,7 +14767,6 @@ public final class ClientProtos { return stopRow_; } - // optional .hbase.pb.Filter filter = 
5; public static final int FILTER_FIELD_NUMBER = 5; private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter filter_; /** @@ -14452,16 +14779,15 @@ public final class ClientProtos { * optional .hbase.pb.Filter filter = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter getFilter() { - return filter_; + return filter_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance() : filter_; } /** * optional .hbase.pb.Filter filter = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() { - return filter_; + return filter_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance() : filter_; } - // optional .hbase.pb.TimeRange time_range = 6; public static final int TIME_RANGE_FIELD_NUMBER = 6; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange timeRange_; /** @@ -14474,16 +14800,15 @@ public final class ClientProtos { * optional .hbase.pb.TimeRange time_range = 6; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { - return timeRange_; + return timeRange_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } /** * optional .hbase.pb.TimeRange time_range = 6; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { - return timeRange_; + return timeRange_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } - // optional uint32 max_versions = 7 [default = 1]; public static final int MAX_VERSIONS_FIELD_NUMBER = 7; private int maxVersions_; /** @@ -14499,7 +14824,6 @@ public final class ClientProtos { return maxVersions_; } - // optional bool cache_blocks = 8 [default = true]; public static final int CACHE_BLOCKS_FIELD_NUMBER = 8; private boolean cacheBlocks_; /** @@ -14515,7 +14839,6 @@ public final class ClientProtos { return cacheBlocks_; } - // optional uint32 batch_size = 9; public static final int BATCH_SIZE_FIELD_NUMBER = 9; private int batchSize_; /** @@ -14531,7 +14854,6 @@ public final class ClientProtos { return batchSize_; } - // optional uint64 max_result_size = 10; public static final int MAX_RESULT_SIZE_FIELD_NUMBER = 10; private long maxResultSize_; /** @@ -14547,7 +14869,6 @@ public final class ClientProtos { return maxResultSize_; } - // optional uint32 store_limit = 11; public static final int STORE_LIMIT_FIELD_NUMBER = 11; private int storeLimit_; /** @@ -14563,7 +14884,6 @@ public final class ClientProtos { return storeLimit_; } - // optional uint32 store_offset = 12; public static final int STORE_OFFSET_FIELD_NUMBER = 12; private int storeOffset_; /** @@ -14579,31 +14899,29 @@ public final class ClientProtos { return storeOffset_; } - // optional bool load_column_families_on_demand = 13; public static final int LOAD_COLUMN_FAMILIES_ON_DEMAND_FIELD_NUMBER = 13; private boolean loadColumnFamiliesOnDemand_; /** - * optional bool load_column_families_on_demand = 13; - * *
      * DO NOT add defaults to load_column_families_on_demand. 
      * 
+ * + * optional bool load_column_families_on_demand = 13; */ public boolean hasLoadColumnFamiliesOnDemand() { return ((bitField0_ & 0x00000400) == 0x00000400); } /** - * optional bool load_column_families_on_demand = 13; - * *
      * DO NOT add defaults to load_column_families_on_demand. 
      * 
+ * + * optional bool load_column_families_on_demand = 13; */ public boolean getLoadColumnFamiliesOnDemand() { return loadColumnFamiliesOnDemand_; } - // optional bool small = 14; public static final int SMALL_FIELD_NUMBER = 14; private boolean small_; /** @@ -14619,7 +14937,6 @@ public final class ClientProtos { return small_; } - // optional bool reversed = 15 [default = false]; public static final int REVERSED_FIELD_NUMBER = 15; private boolean reversed_; /** @@ -14635,9 +14952,8 @@ public final class ClientProtos { return reversed_; } - // optional .hbase.pb.Consistency consistency = 16 [default = STRONG]; public static final int CONSISTENCY_FIELD_NUMBER = 16; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency consistency_; + private int consistency_; /** * optional .hbase.pb.Consistency consistency = 16 [default = STRONG]; */ @@ -14648,10 +14964,10 @@ public final class ClientProtos { * optional .hbase.pb.Consistency consistency = 16 [default = STRONG]; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency getConsistency() { - return consistency_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency result = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency.valueOf(consistency_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency.STRONG : result; } - // optional uint32 caching = 17; public static final int CACHING_FIELD_NUMBER = 17; private int caching_; /** @@ -14667,7 +14983,6 @@ public final class ClientProtos { return caching_; } - // optional bool allow_partial_results = 18; public static final int ALLOW_PARTIAL_RESULTS_FIELD_NUMBER = 18; private boolean allowPartialResults_; /** @@ -14683,7 +14998,6 @@ public final class ClientProtos { return allowPartialResults_; } - // repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19; public static final int CF_TIME_RANGE_FIELD_NUMBER = 19; private java.util.List cfTimeRange_; /** @@ -14719,31 +15033,11 @@ public final class ClientProtos { return cfTimeRange_.get(index); } - private void initFields() { - column_ = java.util.Collections.emptyList(); - attribute_ = java.util.Collections.emptyList(); - startRow_ = com.google.protobuf.ByteString.EMPTY; - stopRow_ = com.google.protobuf.ByteString.EMPTY; - filter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); - timeRange_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - maxVersions_ = 1; - cacheBlocks_ = true; - batchSize_ = 0; - maxResultSize_ = 0L; - storeLimit_ = 0; - storeOffset_ = 0; - loadColumnFamiliesOnDemand_ = false; - small_ = false; - reversed_ = false; - consistency_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency.STRONG; - caching_ = 0; - allowPartialResults_ = false; - cfTimeRange_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; for (int i = 0; i < getColumnCount(); i++) { if (!getColumn(i).isInitialized()) { @@ -14775,7 +15069,6 @@ 
public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < column_.size(); i++) { output.writeMessage(1, column_.get(i)); } @@ -14789,10 +15082,10 @@ public final class ClientProtos { output.writeBytes(4, stopRow_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeMessage(5, filter_); + output.writeMessage(5, getFilter()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeMessage(6, timeRange_); + output.writeMessage(6, getTimeRange()); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeUInt32(7, maxVersions_); @@ -14822,7 +15115,7 @@ public final class ClientProtos { output.writeBool(15, reversed_); } if (((bitField0_ & 0x00002000) == 0x00002000)) { - output.writeEnum(16, consistency_.getNumber()); + output.writeEnum(16, consistency_); } if (((bitField0_ & 0x00004000) == 0x00004000)) { output.writeUInt32(17, caching_); @@ -14833,12 +15126,11 @@ public final class ClientProtos { for (int i = 0; i < cfTimeRange_.size(); i++) { output.writeMessage(19, cfTimeRange_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -14860,11 +15152,11 @@ public final class ClientProtos { } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(5, filter_); + .computeMessageSize(5, getFilter()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(6, timeRange_); + .computeMessageSize(6, getTimeRange()); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream @@ -14904,7 +15196,7 @@ public final class ClientProtos { } if (((bitField0_ & 0x00002000) == 
0x00002000)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(16, consistency_.getNumber()); + .computeEnumSize(16, consistency_); } if (((bitField0_ & 0x00004000) == 0x00004000)) { size += com.google.protobuf.CodedOutputStream @@ -14918,19 +15210,13 @@ public final class ClientProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(19, cfTimeRange_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -15012,8 +15298,7 @@ public final class ClientProtos { } result = result && (hasConsistency() == other.hasConsistency()); if (hasConsistency()) { - result = result && - (getConsistency() == other.getConsistency()); + result = result && consistency_ == other.consistency_; } result = result && (hasCaching() == other.hasCaching()); if (hasCaching()) { @@ -15027,12 +15312,10 @@ public final class ClientProtos { } result = result && getCfTimeRangeList() .equals(other.getCfTimeRangeList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -15070,7 +15353,8 @@ public final class ClientProtos { } if (hasCacheBlocks()) { hash = (37 * hash) + CACHE_BLOCKS_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getCacheBlocks()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getCacheBlocks()); } if (hasBatchSize()) { hash = (37 * hash) + BATCH_SIZE_FIELD_NUMBER; @@ -15078,7 +15362,8 @@ public 
final class ClientProtos { } if (hasMaxResultSize()) { hash = (37 * hash) + MAX_RESULT_SIZE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getMaxResultSize()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getMaxResultSize()); } if (hasStoreLimit()) { hash = (37 * hash) + STORE_LIMIT_FIELD_NUMBER; @@ -15090,19 +15375,22 @@ public final class ClientProtos { } if (hasLoadColumnFamiliesOnDemand()) { hash = (37 * hash) + LOAD_COLUMN_FAMILIES_ON_DEMAND_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getLoadColumnFamiliesOnDemand()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getLoadColumnFamiliesOnDemand()); } if (hasSmall()) { hash = (37 * hash) + SMALL_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getSmall()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getSmall()); } if (hasReversed()) { hash = (37 * hash) + REVERSED_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getReversed()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getReversed()); } if (hasConsistency()) { hash = (37 * hash) + CONSISTENCY_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getConsistency()); + hash = (53 * hash) + consistency_; } if (hasCaching()) { hash = (37 * hash) + CACHING_FIELD_NUMBER; @@ -15110,13 +15398,14 @@ public final class ClientProtos { } if (hasAllowPartialResults()) { hash = (37 * hash) + ALLOW_PARTIAL_RESULTS_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getAllowPartialResults()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getAllowPartialResults()); } if (getCfTimeRangeCount() > 0) { hash = (37 * hash) + CF_TIME_RANGE_FIELD_NUMBER; hash = (53 * hash) + getCfTimeRangeList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -15144,72 +15433,83 @@ public final class ClientProtos { } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, 
extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.Scan} - * *
      **
      * Instead of get from a table, you can scan it with optional filters.
      * You can specify the row key range, time range, the columns/families
      * to scan and so on.
-     *
      * This scan is used the first time in a scan request. The response of
      * the initial scan will return a scanner id, which should be used to
      * fetch result batches later on before it is closed.
      * 
+ * + * Protobuf type {@code hbase.pb.Scan} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.Scan) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Scan_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Scan_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -15222,12 +15522,13 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getColumnFieldBuilder(); getAttributeFieldBuilder(); getFilterFieldBuilder(); @@ -15235,10 +15536,6 @@ public final class ClientProtos { getCfTimeRangeFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (columnBuilder_ == null) { @@ -15258,13 +15555,13 @@ public final class ClientProtos { stopRow_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000008); if (filterBuilder_ == null) { - filter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); + 
filter_ = null; } else { filterBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000010); if (timeRangeBuilder_ == null) { - timeRange_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + timeRange_ = null; } else { timeRangeBuilder_.clear(); } @@ -15287,7 +15584,7 @@ public final class ClientProtos { bitField0_ = (bitField0_ & ~0x00002000); reversed_ = false; bitField0_ = (bitField0_ & ~0x00004000); - consistency_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency.STRONG; + consistency_ = 0; bitField0_ = (bitField0_ & ~0x00008000); caching_ = 0; bitField0_ = (bitField0_ & ~0x00010000); @@ -15302,10 +15599,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Scan_descriptor; @@ -15431,6 +15724,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if 
(other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan)other); @@ -15461,7 +15780,7 @@ public final class ClientProtos { column_ = other.column_; bitField0_ = (bitField0_ & ~0x00000001); columnBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getColumnFieldBuilder() : null; } else { columnBuilder_.addAllMessages(other.column_); @@ -15487,7 +15806,7 @@ public final class ClientProtos { attribute_ = other.attribute_; bitField0_ = (bitField0_ & ~0x00000002); attributeBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getAttributeFieldBuilder() : null; } else { attributeBuilder_.addAllMessages(other.attribute_); @@ -15561,39 +15880,36 @@ public final class ClientProtos { cfTimeRange_ = other.cfTimeRange_; bitField0_ = (bitField0_ & ~0x00040000); cfTimeRangeBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getCfTimeRangeFieldBuilder() : null; } else { cfTimeRangeBuilder_.addAllMessages(other.cfTimeRange_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getColumnCount(); i++) { if (!getColumn(i).isInitialized()) { - return false; } } for (int i = 0; i < getAttributeCount(); i++) { if (!getAttribute(i).isInitialized()) { - return false; } } if (hasFilter()) { if (!getFilter().isInitialized()) { - return false; } } for (int i = 0; i < getCfTimeRangeCount(); i++) { if (!getCfTimeRange(i).isInitialized()) { - return false; } } @@ -15609,7 +15925,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -15619,7 +15935,6 @@ public final class ClientProtos { } private int bitField0_; - // repeated .hbase.pb.Column column = 1; private java.util.List column_ = java.util.Collections.emptyList(); private void ensureColumnIsMutable() { @@ -15629,7 +15944,7 @@ public final class ClientProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ColumnOrBuilder> columnBuilder_; /** @@ -15761,7 +16076,8 @@ public final class ClientProtos { java.lang.Iterable values) { if (columnBuilder_ == null) { ensureColumnIsMutable(); - super.addAll(values, column_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, column_); 
onChanged(); } else { columnBuilder_.addAllMessages(values); @@ -15844,11 +16160,11 @@ public final class ClientProtos { getColumnBuilderList() { return getColumnFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ColumnOrBuilder> getColumnFieldBuilder() { if (columnBuilder_ == null) { - columnBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + columnBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ColumnOrBuilder>( column_, ((bitField0_ & 0x00000001) == 0x00000001), @@ -15859,7 +16175,6 @@ public final class ClientProtos { return columnBuilder_; } - // repeated .hbase.pb.NameBytesPair attribute = 2; private java.util.List attribute_ = java.util.Collections.emptyList(); private void ensureAttributeIsMutable() { @@ -15869,7 +16184,7 @@ public final class ClientProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_; /** @@ -16001,7 +16316,8 @@ public final class ClientProtos { java.lang.Iterable values) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); - super.addAll(values, attribute_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, attribute_); onChanged(); } else { 
attributeBuilder_.addAllMessages(values); @@ -16084,11 +16400,11 @@ public final class ClientProtos { getAttributeBuilderList() { return getAttributeFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getAttributeFieldBuilder() { if (attributeBuilder_ == null) { - attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( attribute_, ((bitField0_ & 0x00000002) == 0x00000002), @@ -16099,7 +16415,6 @@ public final class ClientProtos { return attributeBuilder_; } - // optional bytes start_row = 3; private com.google.protobuf.ByteString startRow_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes start_row = 3; @@ -16135,7 +16450,6 @@ public final class ClientProtos { return this; } - // optional bytes stop_row = 4; private com.google.protobuf.ByteString stopRow_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes stop_row = 4; @@ -16171,9 +16485,8 @@ public final class ClientProtos { return this; } - // optional .hbase.pb.Filter filter = 5; - private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter filter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter filter_ = null; + private 
com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder> filterBuilder_; /** * optional .hbase.pb.Filter filter = 5; @@ -16186,7 +16499,7 @@ public final class ClientProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter getFilter() { if (filterBuilder_ == null) { - return filter_; + return filter_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance() : filter_; } else { return filterBuilder_.getMessage(); } @@ -16227,6 +16540,7 @@ public final class ClientProtos { public Builder mergeFilter(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter value) { if (filterBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010) && + filter_ != null && filter_ != org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) { filter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial(); @@ -16245,7 +16559,7 @@ public final class ClientProtos { */ public Builder clearFilter() { if (filterBuilder_ == null) { - filter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); + filter_ = null; onChanged(); } else { filterBuilder_.clear(); @@ -16268,19 +16582,20 @@ public final class ClientProtos { if (filterBuilder_ != null) { return filterBuilder_.getMessageOrBuilder(); } else { - return filter_; + return filter_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance() : filter_; } } /** * optional .hbase.pb.Filter filter = 5; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder> getFilterFieldBuilder() { if (filterBuilder_ == null) { - filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + filterBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder>( - filter_, + getFilter(), getParentForChildren(), isClean()); filter_ = null; @@ -16288,9 +16603,8 @@ public final class ClientProtos { return filterBuilder_; } - // optional .hbase.pb.TimeRange time_range = 6; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange timeRange_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; /** * optional .hbase.pb.TimeRange time_range = 6; @@ -16303,7 +16617,7 @@ public final class ClientProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { if (timeRangeBuilder_ == null) 
{ - return timeRange_; + return timeRange_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } else { return timeRangeBuilder_.getMessage(); } @@ -16344,6 +16658,7 @@ public final class ClientProtos { public Builder mergeTimeRange(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange value) { if (timeRangeBuilder_ == null) { if (((bitField0_ & 0x00000020) == 0x00000020) && + timeRange_ != null && timeRange_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) { timeRange_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial(); @@ -16362,7 +16677,7 @@ public final class ClientProtos { */ public Builder clearTimeRange() { if (timeRangeBuilder_ == null) { - timeRange_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + timeRange_ = null; onChanged(); } else { timeRangeBuilder_.clear(); @@ -16385,19 +16700,20 @@ public final class ClientProtos { if (timeRangeBuilder_ != null) { return timeRangeBuilder_.getMessageOrBuilder(); } else { - return timeRange_; + return timeRange_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } } /** * optional .hbase.pb.TimeRange time_range = 6; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> getTimeRangeFieldBuilder() { if (timeRangeBuilder_ == null) { - timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder< + timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>( - timeRange_, + getTimeRange(), getParentForChildren(), isClean()); timeRange_ = null; @@ -16405,7 +16721,6 @@ public final class ClientProtos { return timeRangeBuilder_; } - // optional uint32 max_versions = 7 [default = 1]; private int maxVersions_ = 1; /** * optional uint32 max_versions = 7 [default = 1]; @@ -16438,7 +16753,6 @@ public final class ClientProtos { return this; } - // optional bool cache_blocks = 8 [default = true]; private boolean cacheBlocks_ = true; /** * optional bool cache_blocks = 8 [default = true]; @@ -16471,7 +16785,6 @@ public final class ClientProtos { return this; } - // optional uint32 batch_size = 9; private int batchSize_ ; /** * optional uint32 batch_size = 9; @@ -16504,7 +16817,6 @@ public final class ClientProtos { return this; } - // optional uint64 max_result_size = 10; private long maxResultSize_ ; /** * optional uint64 max_result_size = 10; @@ -16537,7 +16849,6 @@ public final class ClientProtos { return this; } - // optional uint32 store_limit = 11; private int storeLimit_ ; /** * optional uint32 
store_limit = 11; @@ -16570,7 +16881,6 @@ public final class ClientProtos { return this; } - // optional uint32 store_offset = 12; private int storeOffset_ ; /** * optional uint32 store_offset = 12; @@ -16603,34 +16913,33 @@ public final class ClientProtos { return this; } - // optional bool load_column_families_on_demand = 13; private boolean loadColumnFamiliesOnDemand_ ; /** - * optional bool load_column_families_on_demand = 13; - * *
        * DO NOT add defaults to load_column_families_on_demand. 
        * 
+ * + * optional bool load_column_families_on_demand = 13; */ public boolean hasLoadColumnFamiliesOnDemand() { return ((bitField0_ & 0x00001000) == 0x00001000); } /** - * optional bool load_column_families_on_demand = 13; - * *
        * DO NOT add defaults to load_column_families_on_demand. 
        * 
+ * + * optional bool load_column_families_on_demand = 13; */ public boolean getLoadColumnFamiliesOnDemand() { return loadColumnFamiliesOnDemand_; } /** - * optional bool load_column_families_on_demand = 13; - * *
        * DO NOT add defaults to load_column_families_on_demand. 
        * 
+ * + * optional bool load_column_families_on_demand = 13; */ public Builder setLoadColumnFamiliesOnDemand(boolean value) { bitField0_ |= 0x00001000; @@ -16639,11 +16948,11 @@ public final class ClientProtos { return this; } /** - * optional bool load_column_families_on_demand = 13; - * *
        * DO NOT add defaults to load_column_families_on_demand. 
        * 
+ * + * optional bool load_column_families_on_demand = 13; */ public Builder clearLoadColumnFamiliesOnDemand() { bitField0_ = (bitField0_ & ~0x00001000); @@ -16652,7 +16961,6 @@ public final class ClientProtos { return this; } - // optional bool small = 14; private boolean small_ ; /** * optional bool small = 14; @@ -16685,7 +16993,6 @@ public final class ClientProtos { return this; } - // optional bool reversed = 15 [default = false]; private boolean reversed_ ; /** * optional bool reversed = 15 [default = false]; @@ -16718,8 +17025,7 @@ public final class ClientProtos { return this; } - // optional .hbase.pb.Consistency consistency = 16 [default = STRONG]; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency consistency_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency.STRONG; + private int consistency_ = 0; /** * optional .hbase.pb.Consistency consistency = 16 [default = STRONG]; */ @@ -16730,7 +17036,8 @@ public final class ClientProtos { * optional .hbase.pb.Consistency consistency = 16 [default = STRONG]; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency getConsistency() { - return consistency_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency result = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency.valueOf(consistency_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency.STRONG : result; } /** * optional .hbase.pb.Consistency consistency = 16 [default = STRONG]; @@ -16740,7 +17047,7 @@ public final class ClientProtos { throw new NullPointerException(); } bitField0_ |= 0x00008000; - consistency_ = value; + consistency_ = value.getNumber(); onChanged(); return this; } @@ -16749,12 +17056,11 @@ public final class ClientProtos { */ public Builder clearConsistency() { bitField0_ = (bitField0_ & ~0x00008000); - consistency_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency.STRONG; + consistency_ = 0; onChanged(); return this; } - // optional uint32 caching = 17; private int caching_ ; /** * optional uint32 caching = 17; @@ -16787,7 +17093,6 @@ public final class ClientProtos { return this; } - // optional bool allow_partial_results = 18; private boolean allowPartialResults_ ; /** * optional bool allow_partial_results = 18; @@ -16820,7 +17125,6 @@ public final class ClientProtos { return this; } - // repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19; private java.util.List cfTimeRange_ = java.util.Collections.emptyList(); private void ensureCfTimeRangeIsMutable() { @@ -16830,7 +17134,7 @@ public final class ClientProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder> cfTimeRangeBuilder_; /** @@ -16962,7 +17266,8 @@ public final class ClientProtos { java.lang.Iterable values) { if (cfTimeRangeBuilder_ == null) { ensureCfTimeRangeIsMutable(); - super.addAll(values, cfTimeRange_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, cfTimeRange_); onChanged(); } else { 
cfTimeRangeBuilder_.addAllMessages(values); @@ -17045,11 +17350,11 @@ public final class ClientProtos { getCfTimeRangeBuilderList() { return getCfTimeRangeFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder> getCfTimeRangeFieldBuilder() { if (cfTimeRangeBuilder_ == null) { - cfTimeRangeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + cfTimeRangeBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder>( cfTimeRange_, ((bitField0_ & 0x00040000) == 0x00040000), @@ -17059,22 +17364,59 @@ public final class ClientProtos { } return cfTimeRangeBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.Scan) } + // @@protoc_insertion_point(class_scope:hbase.pb.Scan) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan DEFAULT_INSTANCE; static { - defaultInstance = new Scan(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan(); + } + + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public Scan parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Scan(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.Scan) } - public interface ScanRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ScanRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ScanRequest) + com.google.protobuf.MessageOrBuilder { - // optional .hbase.pb.RegionSpecifier region = 1; /** * optional .hbase.pb.RegionSpecifier region = 1; */ @@ -17088,7 +17430,6 @@ public final class ClientProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - // optional .hbase.pb.Scan scan = 2; /** * optional .hbase.pb.Scan scan = 2; */ @@ -17102,7 +17443,6 @@ public final class ClientProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder(); - // optional uint64 scanner_id = 3; /** * optional uint64 scanner_id = 3; */ @@ -17112,7 +17452,6 @@ public final class ClientProtos { */ long getScannerId(); - // optional uint32 number_of_rows = 4; /** * optional uint32 number_of_rows = 4; */ @@ -17122,7 +17461,6 @@ public final class ClientProtos { */ int getNumberOfRows(); - // optional bool 
close_scanner = 5; /** * optional bool close_scanner = 5; */ @@ -17132,7 +17470,6 @@ public final class ClientProtos { */ boolean getCloseScanner(); - // optional uint64 next_call_seq = 6; /** * optional uint64 next_call_seq = 6; */ @@ -17142,7 +17479,6 @@ public final class ClientProtos { */ long getNextCallSeq(); - // optional bool client_handles_partials = 7; /** * optional bool client_handles_partials = 7; */ @@ -17152,7 +17488,6 @@ public final class ClientProtos { */ boolean getClientHandlesPartials(); - // optional bool client_handles_heartbeats = 8; /** * optional bool client_handles_heartbeats = 8; */ @@ -17162,7 +17497,6 @@ public final class ClientProtos { */ boolean getClientHandlesHeartbeats(); - // optional bool track_scan_metrics = 9; /** * optional bool track_scan_metrics = 9; */ @@ -17172,7 +17506,6 @@ public final class ClientProtos { */ boolean getTrackScanMetrics(); - // optional bool renew = 10 [default = false]; /** * optional bool renew = 10 [default = false]; */ @@ -17183,51 +17516,48 @@ public final class ClientProtos { boolean getRenew(); } /** - * Protobuf type {@code hbase.pb.ScanRequest} - * *
    **
    * A scan request. Initially, it should specify a scan. Later on, you
    * can use the scanner id returned to fetch result batches with a different
    * scan request.
-   *
    * The scanner will remain open if there are more results, and it's not
    * asked to be closed explicitly.
-   *
    * You can fetch the results and ask the scanner to be closed to save
    * a trip if you are not interested in remaining results.
    * 
+ * + * Protobuf type {@code hbase.pb.ScanRequest} */ - public static final class ScanRequest extends - com.google.protobuf.GeneratedMessage - implements ScanRequestOrBuilder { + public static final class ScanRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ScanRequest) + ScanRequestOrBuilder { // Use ScanRequest.newBuilder() to construct. - private ScanRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private ScanRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ScanRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ScanRequest defaultInstance; - public static ScanRequest getDefaultInstance() { - return defaultInstance; } - - public ScanRequest getDefaultInstanceForType() { - return defaultInstance; + private ScanRequest() { + scannerId_ = 0L; + numberOfRows_ = 0; + closeScanner_ = false; + nextCallSeq_ = 0L; + clientHandlesPartials_ = false; + clientHandlesHeartbeats_ = false; + trackScanMetrics_ = false; + renew_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ScanRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -17318,7 +17648,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - 
e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -17329,30 +17659,14 @@ public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ScanRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ScanRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional .hbase.pb.RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** @@ -17365,16 +17679,15 @@ public final class ClientProtos { * optional .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } /** * optional .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } - // optional .hbase.pb.Scan scan = 2; public static final int SCAN_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan scan_; /** @@ -17387,16 +17700,15 @@ public final class ClientProtos { * optional .hbase.pb.Scan scan = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan getScan() { - return scan_; + return scan_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.getDefaultInstance() : scan_; } /** * optional .hbase.pb.Scan scan = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { - return scan_; + return scan_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.getDefaultInstance() : scan_; } - // optional uint64 scanner_id = 3; public static final int SCANNER_ID_FIELD_NUMBER = 3; private long scannerId_; /** @@ -17412,7 +17724,6 @@ public final class ClientProtos { return scannerId_; } - // optional uint32 number_of_rows = 4; public static final int NUMBER_OF_ROWS_FIELD_NUMBER = 4; private int numberOfRows_; /** @@ -17428,7 +17739,6 @@ public final class ClientProtos { return numberOfRows_; } - // optional bool close_scanner = 5; public static final int CLOSE_SCANNER_FIELD_NUMBER = 5; private boolean closeScanner_; /** @@ -17444,7 +17754,6 @@ public final class ClientProtos { return closeScanner_; } - // optional uint64 next_call_seq = 6; public static final int NEXT_CALL_SEQ_FIELD_NUMBER = 6; private long nextCallSeq_; /** @@ -17460,7 +17769,6 @@ public final class ClientProtos { return nextCallSeq_; } - // optional bool client_handles_partials = 7; public static final int CLIENT_HANDLES_PARTIALS_FIELD_NUMBER = 7; private boolean clientHandlesPartials_; /** @@ -17476,7 +17784,6 @@ public final class ClientProtos { return clientHandlesPartials_; } - // optional bool client_handles_heartbeats = 8; public static final int CLIENT_HANDLES_HEARTBEATS_FIELD_NUMBER = 8; private boolean clientHandlesHeartbeats_; /** @@ -17492,7 +17799,6 @@ public final class ClientProtos { return clientHandlesHeartbeats_; } - // optional bool track_scan_metrics = 9; public static final int TRACK_SCAN_METRICS_FIELD_NUMBER = 9; private boolean trackScanMetrics_; /** @@ -17508,7 +17814,6 @@ public final class ClientProtos { return trackScanMetrics_; } - // optional bool renew = 10 [default = false]; public static final int RENEW_FIELD_NUMBER = 10; private boolean renew_; /** @@ -17524,22 +17829,11 @@ public final class ClientProtos { return renew_; } - private void initFields() { - region_ = 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - scan_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); - scannerId_ = 0L; - numberOfRows_ = 0; - closeScanner_ = false; - nextCallSeq_ = 0L; - clientHandlesPartials_ = false; - clientHandlesHeartbeats_ = false; - trackScanMetrics_ = false; - renew_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (hasRegion()) { if (!getRegion().isInitialized()) { @@ -17559,12 +17853,11 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); + output.writeMessage(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, scan_); + output.writeMessage(2, getScan()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeUInt64(3, scannerId_); @@ -17590,22 +17883,21 @@ public final class ClientProtos { if (((bitField0_ & 0x00000200) == 0x00000200)) { output.writeBool(10, renew_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); + .computeMessageSize(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, scan_); + .computeMessageSize(2, getScan()); } if (((bitField0_ & 0x00000004) == 
0x00000004)) { size += com.google.protobuf.CodedOutputStream @@ -17639,19 +17931,13 @@ public final class ClientProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(10, renew_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -17712,12 +17998,10 @@ public final class ClientProtos { result = result && (getRenew() == other.getRenew()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -17735,7 +18019,8 @@ public final class ClientProtos { } if (hasScannerId()) { hash = (37 * hash) + SCANNER_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getScannerId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getScannerId()); } if (hasNumberOfRows()) { hash = (37 * hash) + NUMBER_OF_ROWS_FIELD_NUMBER; @@ -17743,29 +18028,35 @@ public final class ClientProtos { } if (hasCloseScanner()) { hash = (37 * hash) + CLOSE_SCANNER_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getCloseScanner()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getCloseScanner()); } if (hasNextCallSeq()) { hash = (37 * hash) + NEXT_CALL_SEQ_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNextCallSeq()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNextCallSeq()); } if (hasClientHandlesPartials()) { hash = (37 * hash) + CLIENT_HANDLES_PARTIALS_FIELD_NUMBER; - hash = (53 * hash) + 
hashBoolean(getClientHandlesPartials()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getClientHandlesPartials()); } if (hasClientHandlesHeartbeats()) { hash = (37 * hash) + CLIENT_HANDLES_HEARTBEATS_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getClientHandlesHeartbeats()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getClientHandlesHeartbeats()); } if (hasTrackScanMetrics()) { hash = (37 * hash) + TRACK_SCAN_METRICS_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getTrackScanMetrics()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getTrackScanMetrics()); } if (hasRenew()) { hash = (37 * hash) + RENEW_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getRenew()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getRenew()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -17793,74 +18084,84 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } 
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.ScanRequest} - * *
      **
      * A scan request. Initially, it should specify a scan. Later on, you
      * can use the scanner id returned to fetch result batches with a different
      * scan request.
-     *
      * The scanner will remain open if there are more results, and it's not
      * asked to be closed explicitly.
-     *
      * You can fetch the results and ask the scanner to be closed to save
      * a trip if you are not interested in remaining results.
      * 
+ * + * Protobuf type {@code hbase.pb.ScanRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ScanRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -17873,30 +18174,27 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionFieldBuilder(); getScanFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (scanBuilder_ == null) { - scan_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + scan_ = null; } else { 
scanBuilder_.clear(); } @@ -17920,10 +18218,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanRequest_descriptor; @@ -17998,6 +18292,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest)other); @@ -18039,20 +18359,19 @@ public final class ClientProtos { if (other.hasRenew()) { setRenew(other.getRenew()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (hasRegion()) { if (!getRegion().isInitialized()) { - return false; } } if (hasScan()) { if (!getScan().isInitialized()) { - return false; } } @@ 
-18068,7 +18387,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -18078,9 +18397,8 @@ public final class ClientProtos { } private int bitField0_; - // optional .hbase.pb.RegionSpecifier region = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * optional .hbase.pb.RegionSpecifier region = 1; @@ -18093,7 +18411,7 @@ public final class ClientProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } @@ -18134,6 +18452,7 @@ public final class ClientProtos { public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != null && region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); @@ -18152,7 +18471,7 @@ public final class ClientProtos { */ public Builder clearRegion() { if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; onChanged(); } else { regionBuilder_.clear(); @@ -18175,19 +18494,20 @@ public final class ClientProtos { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { - return region_; + return region_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } } /** * optional .hbase.pb.RegionSpecifier region = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, + getRegion(), getParentForChildren(), isClean()); region_ = null; @@ -18195,9 +18515,8 @@ public final class ClientProtos { return regionBuilder_; } - // optional .hbase.pb.Scan scan = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan scan_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_; /** * optional .hbase.pb.Scan scan = 2; @@ -18210,7 +18529,7 @@ public final class ClientProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan getScan() { if (scanBuilder_ == null) { - return 
scan_; + return scan_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.getDefaultInstance() : scan_; } else { return scanBuilder_.getMessage(); } @@ -18251,6 +18570,7 @@ public final class ClientProtos { public Builder mergeScan(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan value) { if (scanBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + scan_ != null && scan_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) { scan_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial(); @@ -18269,7 +18589,7 @@ public final class ClientProtos { */ public Builder clearScan() { if (scanBuilder_ == null) { - scan_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + scan_ = null; onChanged(); } else { scanBuilder_.clear(); @@ -18292,19 +18612,20 @@ public final class ClientProtos { if (scanBuilder_ != null) { return scanBuilder_.getMessageOrBuilder(); } else { - return scan_; + return scan_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.getDefaultInstance() : scan_; } } /** * optional .hbase.pb.Scan scan = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanOrBuilder> getScanFieldBuilder() { if (scanBuilder_ == null) { - scanBuilder_ = new com.google.protobuf.SingleFieldBuilder< + scanBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanOrBuilder>( - scan_, + getScan(), getParentForChildren(), isClean()); scan_ = null; @@ -18312,7 +18633,6 @@ public final class ClientProtos { return scanBuilder_; } - // optional uint64 scanner_id = 3; private long scannerId_ ; /** * optional uint64 scanner_id = 3; @@ -18345,7 +18665,6 @@ public final class ClientProtos { return this; } - // optional uint32 number_of_rows = 4; private int numberOfRows_ ; /** * optional uint32 number_of_rows = 4; @@ -18378,7 +18697,6 @@ public final class ClientProtos { return this; } - // optional bool close_scanner = 5; private boolean closeScanner_ ; /** * optional bool close_scanner = 5; @@ -18411,7 +18729,6 @@ public final class ClientProtos { return this; } - // optional uint64 next_call_seq = 6; private long nextCallSeq_ ; /** * optional uint64 next_call_seq = 6; @@ -18444,7 +18761,6 @@ public final class ClientProtos { return this; } - // optional bool client_handles_partials = 7; private boolean clientHandlesPartials_ ; /** * optional bool client_handles_partials = 7; @@ -18477,7 +18793,6 @@ public final class ClientProtos { return this; } - // optional bool 
client_handles_heartbeats = 8; private boolean clientHandlesHeartbeats_ ; /** * optional bool client_handles_heartbeats = 8; @@ -18510,7 +18825,6 @@ public final class ClientProtos { return this; } - // optional bool track_scan_metrics = 9; private boolean trackScanMetrics_ ; /** * optional bool track_scan_metrics = 9; @@ -18543,7 +18857,6 @@ public final class ClientProtos { return this; } - // optional bool renew = 10 [default = false]; private boolean renew_ ; /** * optional bool renew = 10 [default = false]; @@ -18575,25 +18888,60 @@ public final class ClientProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ScanRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.ScanRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest DEFAULT_INSTANCE; static { - defaultInstance = new ScanRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ScanRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ScanRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + 
@java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ScanRequest) } - public interface ScanResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ScanResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ScanResponse) + com.google.protobuf.MessageOrBuilder { - // repeated uint32 cells_per_result = 1; /** - * repeated uint32 cells_per_result = 1; - * *
      * This field is filled in if we are doing cellblocks.  A cellblock is made up
      * of all Cells serialized out as one cellblock BUT responses from a server
@@ -18603,11 +18951,11 @@ public final class ClientProtos {
      * has 3, 3, 3 in it, then we know that on the client, we are to make
      * three Results each of three Cells each.
      * 
+ * + * repeated uint32 cells_per_result = 1; */ java.util.List getCellsPerResultList(); /** - * repeated uint32 cells_per_result = 1; - * *
      * This field is filled in if we are doing cellblocks.  A cellblock is made up
      * of all Cells serialized out as one cellblock BUT responses from a server
@@ -18617,11 +18965,11 @@ public final class ClientProtos {
      * has 3, 3, 3 in it, then we know that on the client, we are to make
      * three Results each of three Cells each.
      * 
+ * + * repeated uint32 cells_per_result = 1; */ int getCellsPerResultCount(); /** - * repeated uint32 cells_per_result = 1; - * *
      * This field is filled in if we are doing cellblocks.  A cellblock is made up
      * of all Cells serialized out as one cellblock BUT responses from a server
@@ -18631,10 +18979,11 @@ public final class ClientProtos {
      * has 3, 3, 3 in it, then we know that on the client, we are to make
      * three Results each of three Cells each.
      * 
+ * + * repeated uint32 cells_per_result = 1; */ int getCellsPerResult(int index); - // optional uint64 scanner_id = 2; /** * optional uint64 scanner_id = 2; */ @@ -18644,7 +18993,6 @@ public final class ClientProtos { */ long getScannerId(); - // optional bool more_results = 3; /** * optional bool more_results = 3; */ @@ -18654,7 +19002,6 @@ public final class ClientProtos { */ boolean getMoreResults(); - // optional uint32 ttl = 4; /** * optional uint32 ttl = 4; */ @@ -18664,62 +19011,60 @@ public final class ClientProtos { */ int getTtl(); - // repeated .hbase.pb.Result results = 5; /** - * repeated .hbase.pb.Result results = 5; - * *
      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
      * This field is mutually exclusive with cells_per_result (since the Cells will
      * be inside the pb'd Result)
      * 
+ * + * repeated .hbase.pb.Result results = 5; */ java.util.List getResultsList(); /** - * repeated .hbase.pb.Result results = 5; - * *
      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
      * This field is mutually exclusive with cells_per_result (since the Cells will
      * be inside the pb'd Result)
      * 
+ * + * repeated .hbase.pb.Result results = 5; */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result getResults(int index); /** - * repeated .hbase.pb.Result results = 5; - * *
      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
      * This field is mutually exclusive with cells_per_result (since the Cells will
      * be inside the pb'd Result)
      * 
+ * + * repeated .hbase.pb.Result results = 5; */ int getResultsCount(); /** - * repeated .hbase.pb.Result results = 5; - * *
      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
      * This field is mutually exclusive with cells_per_result (since the Cells will
      * be inside the pb'd Result)
      * 
+ * + * repeated .hbase.pb.Result results = 5; */ java.util.List getResultsOrBuilderList(); /** - * repeated .hbase.pb.Result results = 5; - * *
      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
      * This field is mutually exclusive with cells_per_result (since the Cells will
      * be inside the pb'd Result)
      * 
+ * + * repeated .hbase.pb.Result results = 5; */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder getResultsOrBuilder( int index); - // optional bool stale = 6; /** * optional bool stale = 6; */ @@ -18729,10 +19074,7 @@ public final class ClientProtos { */ boolean getStale(); - // repeated bool partial_flag_per_result = 7; /** - * repeated bool partial_flag_per_result = 7; - * *
      * This field is filled in if we are doing cellblocks. In the event that a row
      * could not fit all of its cells into a single RPC chunk, the results will be
@@ -18742,11 +19084,11 @@ public final class ClientProtos {
      * has false, false, true in it, then we know that on the client side, we need to
      * make another RPC request since the last result was only a partial.
      * 
+ * + * repeated bool partial_flag_per_result = 7; */ java.util.List getPartialFlagPerResultList(); /** - * repeated bool partial_flag_per_result = 7; - * *
      * This field is filled in if we are doing cellblocks. In the event that a row
      * could not fit all of its cells into a single RPC chunk, the results will be
@@ -18756,11 +19098,11 @@ public final class ClientProtos {
      * has false, false, true in it, then we know that on the client side, we need to
      * make another RPC request since the last result was only a partial.
      * 
+ * + * repeated bool partial_flag_per_result = 7; */ int getPartialFlagPerResultCount(); /** - * repeated bool partial_flag_per_result = 7; - * *
      * This field is filled in if we are doing cellblocks. In the event that a row
      * could not fit all of its cells into a single RPC chunk, the results will be
@@ -18770,126 +19112,125 @@ public final class ClientProtos {
      * has false, false, true in it, then we know that on the client side, we need to
      * make another RPC request since the last result was only a partial.
      * 
+ * + * repeated bool partial_flag_per_result = 7; */ boolean getPartialFlagPerResult(int index); - // optional bool more_results_in_region = 8; /** - * optional bool more_results_in_region = 8; - * *
      * A server may choose to limit the number of results returned to the client for
      * reasons such as the size in bytes or quantity of results accumulated. This field
      * will true when more results exist in the current region.
      * 
+ * + * optional bool more_results_in_region = 8; */ boolean hasMoreResultsInRegion(); /** - * optional bool more_results_in_region = 8; - * *
      * A server may choose to limit the number of results returned to the client for
      * reasons such as the size in bytes or quantity of results accumulated. This field
      * will true when more results exist in the current region.
      * 
+ * + * optional bool more_results_in_region = 8; */ boolean getMoreResultsInRegion(); - // optional bool heartbeat_message = 9; /** - * optional bool heartbeat_message = 9; - * *
      * This field is filled in if the server is sending back a heartbeat message.
      * Heartbeat messages are sent back to the client to prevent the scanner from
      * timing out. Seeing a heartbeat message communicates to the Client that the
      * server would have continued to scan had the time limit not been reached.
      * 
+ * + * optional bool heartbeat_message = 9; */ boolean hasHeartbeatMessage(); /** - * optional bool heartbeat_message = 9; - * *
      * This field is filled in if the server is sending back a heartbeat message.
      * Heartbeat messages are sent back to the client to prevent the scanner from
      * timing out. Seeing a heartbeat message communicates to the Client that the
      * server would have continued to scan had the time limit not been reached.
      * 
+ * + * optional bool heartbeat_message = 9; */ boolean getHeartbeatMessage(); - // optional .hbase.pb.ScanMetrics scan_metrics = 10; /** - * optional .hbase.pb.ScanMetrics scan_metrics = 10; - * *
      * This field is filled in if the client has requested that scan metrics be tracked.
      * The metrics tracked here are sent back to the client to be tracked together with 
      * the existing client side metrics.
      * 
+ * + * optional .hbase.pb.ScanMetrics scan_metrics = 10; */ boolean hasScanMetrics(); /** - * optional .hbase.pb.ScanMetrics scan_metrics = 10; - * *
      * This field is filled in if the client has requested that scan metrics be tracked.
      * The metrics tracked here are sent back to the client to be tracked together with 
      * the existing client side metrics.
      * 
+ * + * optional .hbase.pb.ScanMetrics scan_metrics = 10; */ org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics getScanMetrics(); /** - * optional .hbase.pb.ScanMetrics scan_metrics = 10; - * *
      * This field is filled in if the client has requested that scan metrics be tracked.
      * The metrics tracked here are sent back to the client to be tracked together with 
      * the existing client side metrics.
      * 
+ * + * optional .hbase.pb.ScanMetrics scan_metrics = 10; */ org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder getScanMetricsOrBuilder(); } /** - * Protobuf type {@code hbase.pb.ScanResponse} - * *
    **
    * The scan response. If there are no more results, more_results will
    * be false.  If it is not specified, it means there are more.
    * 
+ * + * Protobuf type {@code hbase.pb.ScanResponse} */ - public static final class ScanResponse extends - com.google.protobuf.GeneratedMessage - implements ScanResponseOrBuilder { + public static final class ScanResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ScanResponse) + ScanResponseOrBuilder { // Use ScanResponse.newBuilder() to construct. - private ScanResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private ScanResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ScanResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ScanResponse defaultInstance; - public static ScanResponse getDefaultInstance() { - return defaultInstance; } - - public ScanResponse getDefaultInstanceForType() { - return defaultInstance; + private ScanResponse() { + cellsPerResult_ = java.util.Collections.emptyList(); + scannerId_ = 0L; + moreResults_ = false; + ttl_ = 0; + results_ = java.util.Collections.emptyList(); + stale_ = false; + partialFlagPerResult_ = java.util.Collections.emptyList(); + moreResultsInRegion_ = false; + heartbeatMessage_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ScanResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -18949,7 +19290,8 @@ public final class ClientProtos { results_ = new java.util.ArrayList(); mutable_bitField0_ 
|= 0x00000010; } - results_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry)); + results_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry)); break; } case 48: { @@ -19007,7 +19349,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { cellsPerResult_ = java.util.Collections.unmodifiableList(cellsPerResult_); @@ -19027,35 +19369,17 @@ public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ScanResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ScanResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // repeated uint32 cells_per_result = 1; public static final int 
CELLS_PER_RESULT_FIELD_NUMBER = 1; private java.util.List cellsPerResult_; /** - * repeated uint32 cells_per_result = 1; - * *
      * This field is filled in if we are doing cellblocks.  A cellblock is made up
      * of all Cells serialized out as one cellblock BUT responses from a server
@@ -19065,14 +19389,14 @@ public final class ClientProtos {
      * has 3, 3, 3 in it, then we know that on the client, we are to make
      * three Results each of three Cells each.
      * 
+ * + * repeated uint32 cells_per_result = 1; */ public java.util.List getCellsPerResultList() { return cellsPerResult_; } /** - * repeated uint32 cells_per_result = 1; - * *
      * This field is filled in if we are doing cellblocks.  A cellblock is made up
      * of all Cells serialized out as one cellblock BUT responses from a server
@@ -19082,13 +19406,13 @@ public final class ClientProtos {
      * has 3, 3, 3 in it, then we know that on the client, we are to make
      * three Results each of three Cells each.
      * 
+ * + * repeated uint32 cells_per_result = 1; */ public int getCellsPerResultCount() { return cellsPerResult_.size(); } /** - * repeated uint32 cells_per_result = 1; - * *
      * This field is filled in if we are doing cellblocks.  A cellblock is made up
      * of all Cells serialized out as one cellblock BUT responses from a server
@@ -19098,12 +19422,13 @@ public final class ClientProtos {
      * has 3, 3, 3 in it, then we know that on the client, we are to make
      * three Results each of three Cells each.
      * 
+ * + * repeated uint32 cells_per_result = 1; */ public int getCellsPerResult(int index) { return cellsPerResult_.get(index); } - // optional uint64 scanner_id = 2; public static final int SCANNER_ID_FIELD_NUMBER = 2; private long scannerId_; /** @@ -19119,7 +19444,6 @@ public final class ClientProtos { return scannerId_; } - // optional bool more_results = 3; public static final int MORE_RESULTS_FIELD_NUMBER = 3; private boolean moreResults_; /** @@ -19135,7 +19459,6 @@ public final class ClientProtos { return moreResults_; } - // optional uint32 ttl = 4; public static final int TTL_FIELD_NUMBER = 4; private int ttl_; /** @@ -19151,73 +19474,71 @@ public final class ClientProtos { return ttl_; } - // repeated .hbase.pb.Result results = 5; public static final int RESULTS_FIELD_NUMBER = 5; private java.util.List results_; /** - * repeated .hbase.pb.Result results = 5; - * *
      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
      * This field is mutually exclusive with cells_per_result (since the Cells will
      * be inside the pb'd Result)
      * 
+ * + * repeated .hbase.pb.Result results = 5; */ public java.util.List getResultsList() { return results_; } /** - * repeated .hbase.pb.Result results = 5; - * *
      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
      * This field is mutually exclusive with cells_per_result (since the Cells will
      * be inside the pb'd Result)
      * 
+ * + * repeated .hbase.pb.Result results = 5; */ public java.util.List getResultsOrBuilderList() { return results_; } /** - * repeated .hbase.pb.Result results = 5; - * *
      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
      * This field is mutually exclusive with cells_per_result (since the Cells will
      * be inside the pb'd Result)
      * 
+ * + * repeated .hbase.pb.Result results = 5; */ public int getResultsCount() { return results_.size(); } /** - * repeated .hbase.pb.Result results = 5; - * *
      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
      * This field is mutually exclusive with cells_per_result (since the Cells will
      * be inside the pb'd Result)
      * 
+ * + * repeated .hbase.pb.Result results = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result getResults(int index) { return results_.get(index); } /** - * repeated .hbase.pb.Result results = 5; - * *
      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
      * This field is mutually exclusive with cells_per_result (since the Cells will
      * be inside the pb'd Result)
      * 
+ * + * repeated .hbase.pb.Result results = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder getResultsOrBuilder( int index) { return results_.get(index); } - // optional bool stale = 6; public static final int STALE_FIELD_NUMBER = 6; private boolean stale_; /** @@ -19233,12 +19554,9 @@ public final class ClientProtos { return stale_; } - // repeated bool partial_flag_per_result = 7; public static final int PARTIAL_FLAG_PER_RESULT_FIELD_NUMBER = 7; private java.util.List partialFlagPerResult_; /** - * repeated bool partial_flag_per_result = 7; - * *
      * This field is filled in if we are doing cellblocks. In the event that a row
      * could not fit all of its cells into a single RPC chunk, the results will be
@@ -19248,14 +19566,14 @@ public final class ClientProtos {
      * has false, false, true in it, then we know that on the client side, we need to
      * make another RPC request since the last result was only a partial.
      * 
+ * + * repeated bool partial_flag_per_result = 7; */ public java.util.List getPartialFlagPerResultList() { return partialFlagPerResult_; } /** - * repeated bool partial_flag_per_result = 7; - * *
      * This field is filled in if we are doing cellblocks. In the event that a row
      * could not fit all of its cells into a single RPC chunk, the results will be
@@ -19265,13 +19583,13 @@ public final class ClientProtos {
      * has false, false, true in it, then we know that on the client side, we need to
      * make another RPC request since the last result was only a partial.
      * 
+ * + * repeated bool partial_flag_per_result = 7; */ public int getPartialFlagPerResultCount() { return partialFlagPerResult_.size(); } /** - * repeated bool partial_flag_per_result = 7; - * *
      * This field is filled in if we are doing cellblocks. In the event that a row
      * could not fit all of its cells into a single RPC chunk, the results will be
@@ -19281,125 +19599,113 @@ public final class ClientProtos {
      * has false, false, true in it, then we know that on the client side, we need to
      * make another RPC request since the last result was only a partial.
      * 
+ * + * repeated bool partial_flag_per_result = 7; */ public boolean getPartialFlagPerResult(int index) { return partialFlagPerResult_.get(index); } - // optional bool more_results_in_region = 8; public static final int MORE_RESULTS_IN_REGION_FIELD_NUMBER = 8; private boolean moreResultsInRegion_; /** - * optional bool more_results_in_region = 8; - * *
      * A server may choose to limit the number of results returned to the client for
      * reasons such as the size in bytes or quantity of results accumulated. This field
      * will true when more results exist in the current region.
      * 
+ * + * optional bool more_results_in_region = 8; */ public boolean hasMoreResultsInRegion() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** - * optional bool more_results_in_region = 8; - * *
      * A server may choose to limit the number of results returned to the client for
      * reasons such as the size in bytes or quantity of results accumulated. This field
      * will true when more results exist in the current region.
      * 
+ * + * optional bool more_results_in_region = 8; */ public boolean getMoreResultsInRegion() { return moreResultsInRegion_; } - // optional bool heartbeat_message = 9; public static final int HEARTBEAT_MESSAGE_FIELD_NUMBER = 9; private boolean heartbeatMessage_; /** - * optional bool heartbeat_message = 9; - * *
      * This field is filled in if the server is sending back a heartbeat message.
      * Heartbeat messages are sent back to the client to prevent the scanner from
      * timing out. Seeing a heartbeat message communicates to the Client that the
      * server would have continued to scan had the time limit not been reached.
      * 
+ * + * optional bool heartbeat_message = 9; */ public boolean hasHeartbeatMessage() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** - * optional bool heartbeat_message = 9; - * *
      * This field is filled in if the server is sending back a heartbeat message.
      * Heartbeat messages are sent back to the client to prevent the scanner from
      * timing out. Seeing a heartbeat message communicates to the Client that the
      * server would have continued to scan had the time limit not been reached.
      * 
+ * + * optional bool heartbeat_message = 9; */ public boolean getHeartbeatMessage() { return heartbeatMessage_; } - // optional .hbase.pb.ScanMetrics scan_metrics = 10; public static final int SCAN_METRICS_FIELD_NUMBER = 10; private org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics scanMetrics_; /** - * optional .hbase.pb.ScanMetrics scan_metrics = 10; - * *
      * This field is filled in if the client has requested that scan metrics be tracked.
      * The metrics tracked here are sent back to the client to be tracked together with 
      * the existing client side metrics.
      * 
+ * + * optional .hbase.pb.ScanMetrics scan_metrics = 10; */ public boolean hasScanMetrics() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** - * optional .hbase.pb.ScanMetrics scan_metrics = 10; - * *
      * This field is filled in if the client has requested that scan metrics be tracked.
      * The metrics tracked here are sent back to the client to be tracked together with 
      * the existing client side metrics.
      * 
+ * + * optional .hbase.pb.ScanMetrics scan_metrics = 10; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics getScanMetrics() { - return scanMetrics_; + return scanMetrics_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance() : scanMetrics_; } /** - * optional .hbase.pb.ScanMetrics scan_metrics = 10; - * *
      * This field is filled in if the client has requested that scan metrics be tracked.
      * The metrics tracked here are sent back to the client to be tracked together with 
      * the existing client side metrics.
      * 
+ * + * optional .hbase.pb.ScanMetrics scan_metrics = 10; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder getScanMetricsOrBuilder() { - return scanMetrics_; + return scanMetrics_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance() : scanMetrics_; } - private void initFields() { - cellsPerResult_ = java.util.Collections.emptyList(); - scannerId_ = 0L; - moreResults_ = false; - ttl_ = 0; - results_ = java.util.Collections.emptyList(); - stale_ = false; - partialFlagPerResult_ = java.util.Collections.emptyList(); - moreResultsInRegion_ = false; - heartbeatMessage_ = false; - scanMetrics_ = org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -19407,7 +19713,6 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < cellsPerResult_.size(); i++) { output.writeUInt32(1, cellsPerResult_.get(i)); } @@ -19436,14 +19741,13 @@ public final class ClientProtos { output.writeBool(9, heartbeatMessage_); } if (((bitField0_ & 0x00000040) == 0x00000040)) { - output.writeMessage(10, scanMetrics_); + output.writeMessage(10, getScanMetrics()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -19492,21 +19796,15 @@ public final class ClientProtos { } if (((bitField0_ & 0x00000040) == 0x00000040)) { size += 
com.google.protobuf.CodedOutputStream - .computeMessageSize(10, scanMetrics_); + .computeMessageSize(10, getScanMetrics()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -19558,12 +19856,10 @@ public final class ClientProtos { result = result && getScanMetrics() .equals(other.getScanMetrics()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -19577,11 +19873,13 @@ public final class ClientProtos { } if (hasScannerId()) { hash = (37 * hash) + SCANNER_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getScannerId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getScannerId()); } if (hasMoreResults()) { hash = (37 * hash) + MORE_RESULTS_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getMoreResults()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getMoreResults()); } if (hasTtl()) { hash = (37 * hash) + TTL_FIELD_NUMBER; @@ -19593,7 +19891,8 @@ public final class ClientProtos { } if (hasStale()) { hash = (37 * hash) + STALE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getStale()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getStale()); } if (getPartialFlagPerResultCount() > 0) { hash = (37 * hash) + PARTIAL_FLAG_PER_RESULT_FIELD_NUMBER; @@ -19601,17 +19900,19 @@ public final class ClientProtos { } if (hasMoreResultsInRegion()) { hash = (37 * hash) + 
MORE_RESULTS_IN_REGION_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getMoreResultsInRegion()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getMoreResultsInRegion()); } if (hasHeartbeatMessage()) { hash = (37 * hash) + HEARTBEAT_MESSAGE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getHeartbeatMessage()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getHeartbeatMessage()); } if (hasScanMetrics()) { hash = (37 * hash) + SCAN_METRICS_FIELD_NUMBER; hash = (53 * hash) + getScanMetrics().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -19639,67 +19940,79 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, 
extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.ScanResponse} - * *
      **
      * The scan response. If there are no more results, more_results will
      * be false.  If it is not specified, it means there are more.
      * 
+ * + * Protobuf type {@code hbase.pb.ScanResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ScanResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -19712,20 +20025,17 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getResultsFieldBuilder(); getScanMetricsFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); cellsPerResult_ = java.util.Collections.emptyList(); @@ -19751,7 +20061,7 @@ public final class ClientProtos { heartbeatMessage_ = false; bitField0_ = (bitField0_ & ~0x00000100); if (scanMetricsBuilder_ == null) { - scanMetrics_ = org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance(); + scanMetrics_ = null; } else { 
scanMetricsBuilder_.clear(); } @@ -19759,10 +20069,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanResponse_descriptor; @@ -19840,6 +20146,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse)other); @@ -19889,7 +20221,7 @@ public final class ClientProtos { results_ = other.results_; bitField0_ = (bitField0_ & ~0x00000010); resultsBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getResultsFieldBuilder() : null; } else { resultsBuilder_.addAllMessages(other.results_); @@ -19918,7 +20250,8 @@ public final class ClientProtos { if (other.hasScanMetrics()) { mergeScanMetrics(other.getScanMetrics()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -19935,7 +20268,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -19945,7 +20278,6 @@ public final class ClientProtos { } private int bitField0_; - // repeated uint32 cells_per_result = 1; private java.util.List cellsPerResult_ = java.util.Collections.emptyList(); private void ensureCellsPerResultIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { @@ -19954,8 +20286,6 @@ public final class ClientProtos { } } /** - * repeated uint32 cells_per_result = 1; - * *
        * This field is filled in if we are doing cellblocks.  A cellblock is made up
        * of all Cells serialized out as one cellblock BUT responses from a server
@@ -19965,14 +20295,14 @@ public final class ClientProtos {
        * has 3, 3, 3 in it, then we know that on the client, we are to make
        * three Results each of three Cells each.
        * 
+ * + * repeated uint32 cells_per_result = 1; */ public java.util.List getCellsPerResultList() { return java.util.Collections.unmodifiableList(cellsPerResult_); } /** - * repeated uint32 cells_per_result = 1; - * *
        * This field is filled in if we are doing cellblocks.  A cellblock is made up
        * of all Cells serialized out as one cellblock BUT responses from a server
@@ -19982,13 +20312,13 @@ public final class ClientProtos {
        * has 3, 3, 3 in it, then we know that on the client, we are to make
        * three Results each of three Cells each.
        * 
+ * + * repeated uint32 cells_per_result = 1; */ public int getCellsPerResultCount() { return cellsPerResult_.size(); } /** - * repeated uint32 cells_per_result = 1; - * *
        * This field is filled in if we are doing cellblocks.  A cellblock is made up
        * of all Cells serialized out as one cellblock BUT responses from a server
@@ -19998,13 +20328,13 @@ public final class ClientProtos {
        * has 3, 3, 3 in it, then we know that on the client, we are to make
        * three Results each of three Cells each.
        * 
+ * + * repeated uint32 cells_per_result = 1; */ public int getCellsPerResult(int index) { return cellsPerResult_.get(index); } /** - * repeated uint32 cells_per_result = 1; - * *
        * This field is filled in if we are doing cellblocks.  A cellblock is made up
        * of all Cells serialized out as one cellblock BUT responses from a server
@@ -20014,6 +20344,8 @@ public final class ClientProtos {
        * has 3, 3, 3 in it, then we know that on the client, we are to make
        * three Results each of three Cells each.
        * 
+ * + * repeated uint32 cells_per_result = 1; */ public Builder setCellsPerResult( int index, int value) { @@ -20023,8 +20355,6 @@ public final class ClientProtos { return this; } /** - * repeated uint32 cells_per_result = 1; - * *
        * This field is filled in if we are doing cellblocks.  A cellblock is made up
        * of all Cells serialized out as one cellblock BUT responses from a server
@@ -20034,6 +20364,8 @@ public final class ClientProtos {
        * has 3, 3, 3 in it, then we know that on the client, we are to make
        * three Results each of three Cells each.
        * 
+ * + * repeated uint32 cells_per_result = 1; */ public Builder addCellsPerResult(int value) { ensureCellsPerResultIsMutable(); @@ -20042,8 +20374,6 @@ public final class ClientProtos { return this; } /** - * repeated uint32 cells_per_result = 1; - * *
        * This field is filled in if we are doing cellblocks.  A cellblock is made up
        * of all Cells serialized out as one cellblock BUT responses from a server
@@ -20053,17 +20383,18 @@ public final class ClientProtos {
        * has 3, 3, 3 in it, then we know that on the client, we are to make
        * three Results each of three Cells each.
        * 
+ * + * repeated uint32 cells_per_result = 1; */ public Builder addAllCellsPerResult( java.lang.Iterable values) { ensureCellsPerResultIsMutable(); - super.addAll(values, cellsPerResult_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, cellsPerResult_); onChanged(); return this; } /** - * repeated uint32 cells_per_result = 1; - * *
        * This field is filled in if we are doing cellblocks.  A cellblock is made up
        * of all Cells serialized out as one cellblock BUT responses from a server
@@ -20073,6 +20404,8 @@ public final class ClientProtos {
        * has 3, 3, 3 in it, then we know that on the client, we are to make
        * three Results each of three Cells each.
        * 
+ * + * repeated uint32 cells_per_result = 1; */ public Builder clearCellsPerResult() { cellsPerResult_ = java.util.Collections.emptyList(); @@ -20081,7 +20414,6 @@ public final class ClientProtos { return this; } - // optional uint64 scanner_id = 2; private long scannerId_ ; /** * optional uint64 scanner_id = 2; @@ -20114,7 +20446,6 @@ public final class ClientProtos { return this; } - // optional bool more_results = 3; private boolean moreResults_ ; /** * optional bool more_results = 3; @@ -20147,7 +20478,6 @@ public final class ClientProtos { return this; } - // optional uint32 ttl = 4; private int ttl_ ; /** * optional uint32 ttl = 4; @@ -20180,7 +20510,6 @@ public final class ClientProtos { return this; } - // repeated .hbase.pb.Result results = 5; private java.util.List results_ = java.util.Collections.emptyList(); private void ensureResultsIsMutable() { @@ -20190,17 +20519,17 @@ public final class ClientProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder> resultsBuilder_; /** - * repeated .hbase.pb.Result results = 5; - * *
        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
        * This field is mutually exclusive with cells_per_result (since the Cells will
        * be inside the pb'd Result)
        * 
+ * + * repeated .hbase.pb.Result results = 5; */ public java.util.List getResultsList() { if (resultsBuilder_ == null) { @@ -20210,13 +20539,13 @@ public final class ClientProtos { } } /** - * repeated .hbase.pb.Result results = 5; - * *
        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
        * This field is mutually exclusive with cells_per_result (since the Cells will
        * be inside the pb'd Result)
        * 
+ * + * repeated .hbase.pb.Result results = 5; */ public int getResultsCount() { if (resultsBuilder_ == null) { @@ -20226,13 +20555,13 @@ public final class ClientProtos { } } /** - * repeated .hbase.pb.Result results = 5; - * *
        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
        * This field is mutually exclusive with cells_per_result (since the Cells will
        * be inside the pb'd Result)
        * 
+ * + * repeated .hbase.pb.Result results = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result getResults(int index) { if (resultsBuilder_ == null) { @@ -20242,13 +20571,13 @@ public final class ClientProtos { } } /** - * repeated .hbase.pb.Result results = 5; - * *
        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
        * This field is mutually exclusive with cells_per_result (since the Cells will
        * be inside the pb'd Result)
        * 
+ * + * repeated .hbase.pb.Result results = 5; */ public Builder setResults( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result value) { @@ -20265,13 +20594,13 @@ public final class ClientProtos { return this; } /** - * repeated .hbase.pb.Result results = 5; - * *
        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
        * This field is mutually exclusive with cells_per_result (since the Cells will
        * be inside the pb'd Result)
        * 
+ * + * repeated .hbase.pb.Result results = 5; */ public Builder setResults( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder builderForValue) { @@ -20285,13 +20614,13 @@ public final class ClientProtos { return this; } /** - * repeated .hbase.pb.Result results = 5; - * *
        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
        * This field is mutually exclusive with cells_per_result (since the Cells will
        * be inside the pb'd Result)
        * 
+ * + * repeated .hbase.pb.Result results = 5; */ public Builder addResults(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result value) { if (resultsBuilder_ == null) { @@ -20307,13 +20636,13 @@ public final class ClientProtos { return this; } /** - * repeated .hbase.pb.Result results = 5; - * *
        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
        * This field is mutually exclusive with cells_per_result (since the Cells will
        * be inside the pb'd Result)
        * 
+ * + * repeated .hbase.pb.Result results = 5; */ public Builder addResults( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result value) { @@ -20330,13 +20659,13 @@ public final class ClientProtos { return this; } /** - * repeated .hbase.pb.Result results = 5; - * *
        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
        * This field is mutually exclusive with cells_per_result (since the Cells will
        * be inside the pb'd Result)
        * 
+ * + * repeated .hbase.pb.Result results = 5; */ public Builder addResults( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder builderForValue) { @@ -20350,13 +20679,13 @@ public final class ClientProtos { return this; } /** - * repeated .hbase.pb.Result results = 5; - * *
        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
        * This field is mutually exclusive with cells_per_result (since the Cells will
        * be inside the pb'd Result)
        * 
+ * + * repeated .hbase.pb.Result results = 5; */ public Builder addResults( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder builderForValue) { @@ -20370,19 +20699,20 @@ public final class ClientProtos { return this; } /** - * repeated .hbase.pb.Result results = 5; - * *
        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
        * This field is mutually exclusive with cells_per_result (since the Cells will
        * be inside the pb'd Result)
        * 
+ * + * repeated .hbase.pb.Result results = 5; */ public Builder addAllResults( java.lang.Iterable values) { if (resultsBuilder_ == null) { ensureResultsIsMutable(); - super.addAll(values, results_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, results_); onChanged(); } else { resultsBuilder_.addAllMessages(values); @@ -20390,13 +20720,13 @@ public final class ClientProtos { return this; } /** - * repeated .hbase.pb.Result results = 5; - * *
        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
        * This field is mutually exclusive with cells_per_result (since the Cells will
        * be inside the pb'd Result)
        * 
+ * + * repeated .hbase.pb.Result results = 5; */ public Builder clearResults() { if (resultsBuilder_ == null) { @@ -20409,13 +20739,13 @@ public final class ClientProtos { return this; } /** - * repeated .hbase.pb.Result results = 5; - * *
        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
        * This field is mutually exclusive with cells_per_result (since the Cells will
        * be inside the pb'd Result)
        * 
+ * + * repeated .hbase.pb.Result results = 5; */ public Builder removeResults(int index) { if (resultsBuilder_ == null) { @@ -20428,26 +20758,26 @@ public final class ClientProtos { return this; } /** - * repeated .hbase.pb.Result results = 5; - * *
        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
        * This field is mutually exclusive with cells_per_result (since the Cells will
        * be inside the pb'd Result)
        * 
+ * + * repeated .hbase.pb.Result results = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder getResultsBuilder( int index) { return getResultsFieldBuilder().getBuilder(index); } /** - * repeated .hbase.pb.Result results = 5; - * *
        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
        * This field is mutually exclusive with cells_per_result (since the Cells will
        * be inside the pb'd Result)
        * 
+ * + * repeated .hbase.pb.Result results = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder getResultsOrBuilder( int index) { @@ -20457,13 +20787,13 @@ public final class ClientProtos { } } /** - * repeated .hbase.pb.Result results = 5; - * *
        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
        * This field is mutually exclusive with cells_per_result (since the Cells will
        * be inside the pb'd Result)
        * 
+ * + * repeated .hbase.pb.Result results = 5; */ public java.util.List getResultsOrBuilderList() { @@ -20474,26 +20804,26 @@ public final class ClientProtos { } } /** - * repeated .hbase.pb.Result results = 5; - * *
        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
        * This field is mutually exclusive with cells_per_result (since the Cells will
        * be inside the pb'd Result)
        * 
+ * + * repeated .hbase.pb.Result results = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder addResultsBuilder() { return getResultsFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance()); } /** - * repeated .hbase.pb.Result results = 5; - * *
        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
        * This field is mutually exclusive with cells_per_result (since the Cells will
        * be inside the pb'd Result)
        * 
+ * + * repeated .hbase.pb.Result results = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder addResultsBuilder( int index) { @@ -20501,23 +20831,23 @@ public final class ClientProtos { index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance()); } /** - * repeated .hbase.pb.Result results = 5; - * *
        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
        * This field is mutually exclusive with cells_per_result (since the Cells will
        * be inside the pb'd Result)
        * 
+ * + * repeated .hbase.pb.Result results = 5; */ public java.util.List getResultsBuilderList() { return getResultsFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder> getResultsFieldBuilder() { if (resultsBuilder_ == null) { - resultsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + resultsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder>( results_, ((bitField0_ & 0x00000010) == 0x00000010), @@ -20528,7 +20858,6 @@ public final class ClientProtos { return resultsBuilder_; } - // optional bool stale = 6; private boolean stale_ ; /** * optional bool stale = 6; @@ -20561,7 +20890,6 @@ public final class ClientProtos { return this; } - // repeated bool partial_flag_per_result = 7; private java.util.List partialFlagPerResult_ = java.util.Collections.emptyList(); private void ensurePartialFlagPerResultIsMutable() { if (!((bitField0_ & 0x00000040) == 0x00000040)) { @@ -20570,8 +20898,6 @@ public final class ClientProtos { } } /** - * repeated bool partial_flag_per_result = 7; - * *
        * This field is filled in if we are doing cellblocks. In the event that a row
        * could not fit all of its cells into a single RPC chunk, the results will be
@@ -20581,14 +20907,14 @@ public final class ClientProtos {
        * has false, false, true in it, then we know that on the client side, we need to
        * make another RPC request since the last result was only a partial.
        * 
+ * + * repeated bool partial_flag_per_result = 7; */ public java.util.List getPartialFlagPerResultList() { return java.util.Collections.unmodifiableList(partialFlagPerResult_); } /** - * repeated bool partial_flag_per_result = 7; - * *
        * This field is filled in if we are doing cellblocks. In the event that a row
        * could not fit all of its cells into a single RPC chunk, the results will be
@@ -20598,13 +20924,13 @@ public final class ClientProtos {
        * has false, false, true in it, then we know that on the client side, we need to
        * make another RPC request since the last result was only a partial.
        * 
+ * + * repeated bool partial_flag_per_result = 7; */ public int getPartialFlagPerResultCount() { return partialFlagPerResult_.size(); } /** - * repeated bool partial_flag_per_result = 7; - * *
        * This field is filled in if we are doing cellblocks. In the event that a row
        * could not fit all of its cells into a single RPC chunk, the results will be
@@ -20614,13 +20940,13 @@ public final class ClientProtos {
        * has false, false, true in it, then we know that on the client side, we need to
        * make another RPC request since the last result was only a partial.
        * 
+ * + * repeated bool partial_flag_per_result = 7; */ public boolean getPartialFlagPerResult(int index) { return partialFlagPerResult_.get(index); } /** - * repeated bool partial_flag_per_result = 7; - * *
        * This field is filled in if we are doing cellblocks. In the event that a row
        * could not fit all of its cells into a single RPC chunk, the results will be
@@ -20630,6 +20956,8 @@ public final class ClientProtos {
        * has false, false, true in it, then we know that on the client side, we need to
        * make another RPC request since the last result was only a partial.
        * 
+ * + * repeated bool partial_flag_per_result = 7; */ public Builder setPartialFlagPerResult( int index, boolean value) { @@ -20639,8 +20967,6 @@ public final class ClientProtos { return this; } /** - * repeated bool partial_flag_per_result = 7; - * *
        * This field is filled in if we are doing cellblocks. In the event that a row
        * could not fit all of its cells into a single RPC chunk, the results will be
@@ -20650,6 +20976,8 @@ public final class ClientProtos {
        * has false, false, true in it, then we know that on the client side, we need to
        * make another RPC request since the last result was only a partial.
        * 
+ * + * repeated bool partial_flag_per_result = 7; */ public Builder addPartialFlagPerResult(boolean value) { ensurePartialFlagPerResultIsMutable(); @@ -20658,8 +20986,6 @@ public final class ClientProtos { return this; } /** - * repeated bool partial_flag_per_result = 7; - * *
        * This field is filled in if we are doing cellblocks. In the event that a row
        * could not fit all of its cells into a single RPC chunk, the results will be
@@ -20669,17 +20995,18 @@ public final class ClientProtos {
        * has false, false, true in it, then we know that on the client side, we need to
        * make another RPC request since the last result was only a partial.
        * 
+ * + * repeated bool partial_flag_per_result = 7; */ public Builder addAllPartialFlagPerResult( java.lang.Iterable values) { ensurePartialFlagPerResultIsMutable(); - super.addAll(values, partialFlagPerResult_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, partialFlagPerResult_); onChanged(); return this; } /** - * repeated bool partial_flag_per_result = 7; - * *
        * This field is filled in if we are doing cellblocks. In the event that a row
        * could not fit all of its cells into a single RPC chunk, the results will be
@@ -20689,6 +21016,8 @@ public final class ClientProtos {
        * has false, false, true in it, then we know that on the client side, we need to
        * make another RPC request since the last result was only a partial.
        * 
+ * + * repeated bool partial_flag_per_result = 7; */ public Builder clearPartialFlagPerResult() { partialFlagPerResult_ = java.util.Collections.emptyList(); @@ -20697,40 +21026,39 @@ public final class ClientProtos { return this; } - // optional bool more_results_in_region = 8; private boolean moreResultsInRegion_ ; /** - * optional bool more_results_in_region = 8; - * *
        * A server may choose to limit the number of results returned to the client for
        * reasons such as the size in bytes or quantity of results accumulated. This field
        * will true when more results exist in the current region.
        * 
+ * + * optional bool more_results_in_region = 8; */ public boolean hasMoreResultsInRegion() { return ((bitField0_ & 0x00000080) == 0x00000080); } /** - * optional bool more_results_in_region = 8; - * *
        * A server may choose to limit the number of results returned to the client for
        * reasons such as the size in bytes or quantity of results accumulated. This field
        * will true when more results exist in the current region.
        * 
+ * + * optional bool more_results_in_region = 8; */ public boolean getMoreResultsInRegion() { return moreResultsInRegion_; } /** - * optional bool more_results_in_region = 8; - * *
        * A server may choose to limit the number of results returned to the client for
        * reasons such as the size in bytes or quantity of results accumulated. This field
        * will true when more results exist in the current region.
        * 
+ * + * optional bool more_results_in_region = 8; */ public Builder setMoreResultsInRegion(boolean value) { bitField0_ |= 0x00000080; @@ -20739,13 +21067,13 @@ public final class ClientProtos { return this; } /** - * optional bool more_results_in_region = 8; - * *
        * A server may choose to limit the number of results returned to the client for
        * reasons such as the size in bytes or quantity of results accumulated. This field
        * will true when more results exist in the current region.
        * 
+ * + * optional bool more_results_in_region = 8; */ public Builder clearMoreResultsInRegion() { bitField0_ = (bitField0_ & ~0x00000080); @@ -20754,43 +21082,42 @@ public final class ClientProtos { return this; } - // optional bool heartbeat_message = 9; private boolean heartbeatMessage_ ; /** - * optional bool heartbeat_message = 9; - * *
        * This field is filled in if the server is sending back a heartbeat message.
        * Heartbeat messages are sent back to the client to prevent the scanner from
        * timing out. Seeing a heartbeat message communicates to the Client that the
        * server would have continued to scan had the time limit not been reached.
        * 
+ * + * optional bool heartbeat_message = 9; */ public boolean hasHeartbeatMessage() { return ((bitField0_ & 0x00000100) == 0x00000100); } /** - * optional bool heartbeat_message = 9; - * *
        * This field is filled in if the server is sending back a heartbeat message.
        * Heartbeat messages are sent back to the client to prevent the scanner from
        * timing out. Seeing a heartbeat message communicates to the Client that the
        * server would have continued to scan had the time limit not been reached.
        * 
+ * + * optional bool heartbeat_message = 9; */ public boolean getHeartbeatMessage() { return heartbeatMessage_; } /** - * optional bool heartbeat_message = 9; - * *
        * This field is filled in if the server is sending back a heartbeat message.
        * Heartbeat messages are sent back to the client to prevent the scanner from
        * timing out. Seeing a heartbeat message communicates to the Client that the
        * server would have continued to scan had the time limit not been reached.
        * 
+ * + * optional bool heartbeat_message = 9; */ public Builder setHeartbeatMessage(boolean value) { bitField0_ |= 0x00000100; @@ -20799,14 +21126,14 @@ public final class ClientProtos { return this; } /** - * optional bool heartbeat_message = 9; - * *
        * This field is filled in if the server is sending back a heartbeat message.
        * Heartbeat messages are sent back to the client to prevent the scanner from
        * timing out. Seeing a heartbeat message communicates to the Client that the
        * server would have continued to scan had the time limit not been reached.
        * 
+ * + * optional bool heartbeat_message = 9; */ public Builder clearHeartbeatMessage() { bitField0_ = (bitField0_ & ~0x00000100); @@ -20815,46 +21142,45 @@ public final class ClientProtos { return this; } - // optional .hbase.pb.ScanMetrics scan_metrics = 10; - private org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics scanMetrics_ = org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics scanMetrics_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics, org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder> scanMetricsBuilder_; /** - * optional .hbase.pb.ScanMetrics scan_metrics = 10; - * *
        * This field is filled in if the client has requested that scan metrics be tracked.
        * The metrics tracked here are sent back to the client to be tracked together with 
        * the existing client side metrics.
        * 
+ * + * optional .hbase.pb.ScanMetrics scan_metrics = 10; */ public boolean hasScanMetrics() { return ((bitField0_ & 0x00000200) == 0x00000200); } /** - * optional .hbase.pb.ScanMetrics scan_metrics = 10; - * *
        * This field is filled in if the client has requested that scan metrics be tracked.
        * The metrics tracked here are sent back to the client to be tracked together with 
        * the existing client side metrics.
        * 
+ * + * optional .hbase.pb.ScanMetrics scan_metrics = 10; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics getScanMetrics() { if (scanMetricsBuilder_ == null) { - return scanMetrics_; + return scanMetrics_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance() : scanMetrics_; } else { return scanMetricsBuilder_.getMessage(); } } /** - * optional .hbase.pb.ScanMetrics scan_metrics = 10; - * *
        * This field is filled in if the client has requested that scan metrics be tracked.
        * The metrics tracked here are sent back to the client to be tracked together with 
        * the existing client side metrics.
        * 
+ * + * optional .hbase.pb.ScanMetrics scan_metrics = 10; */ public Builder setScanMetrics(org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics value) { if (scanMetricsBuilder_ == null) { @@ -20870,13 +21196,13 @@ public final class ClientProtos { return this; } /** - * optional .hbase.pb.ScanMetrics scan_metrics = 10; - * *
        * This field is filled in if the client has requested that scan metrics be tracked.
        * The metrics tracked here are sent back to the client to be tracked together with 
        * the existing client side metrics.
        * 
+ * + * optional .hbase.pb.ScanMetrics scan_metrics = 10; */ public Builder setScanMetrics( org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.Builder builderForValue) { @@ -20890,17 +21216,18 @@ public final class ClientProtos { return this; } /** - * optional .hbase.pb.ScanMetrics scan_metrics = 10; - * *
        * This field is filled in if the client has requested that scan metrics be tracked.
        * The metrics tracked here are sent back to the client to be tracked together with 
        * the existing client side metrics.
        * 
+ * + * optional .hbase.pb.ScanMetrics scan_metrics = 10; */ public Builder mergeScanMetrics(org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics value) { if (scanMetricsBuilder_ == null) { if (((bitField0_ & 0x00000200) == 0x00000200) && + scanMetrics_ != null && scanMetrics_ != org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance()) { scanMetrics_ = org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.newBuilder(scanMetrics_).mergeFrom(value).buildPartial(); @@ -20915,17 +21242,17 @@ public final class ClientProtos { return this; } /** - * optional .hbase.pb.ScanMetrics scan_metrics = 10; - * *
        * This field is filled in if the client has requested that scan metrics be tracked.
        * The metrics tracked here are sent back to the client to be tracked together with 
        * the existing client side metrics.
        * 
+ * + * optional .hbase.pb.ScanMetrics scan_metrics = 10; */ public Builder clearScanMetrics() { if (scanMetricsBuilder_ == null) { - scanMetrics_ = org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance(); + scanMetrics_ = null; onChanged(); } else { scanMetricsBuilder_.clear(); @@ -20934,13 +21261,13 @@ public final class ClientProtos { return this; } /** - * optional .hbase.pb.ScanMetrics scan_metrics = 10; - * *
        * This field is filled in if the client has requested that scan metrics be tracked.
        * The metrics tracked here are sent back to the client to be tracked together with 
        * the existing client side metrics.
        * 
+ * + * optional .hbase.pb.ScanMetrics scan_metrics = 10; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.Builder getScanMetricsBuilder() { bitField0_ |= 0x00000200; @@ -20948,59 +21275,97 @@ public final class ClientProtos { return getScanMetricsFieldBuilder().getBuilder(); } /** - * optional .hbase.pb.ScanMetrics scan_metrics = 10; - * *
        * This field is filled in if the client has requested that scan metrics be tracked.
        * The metrics tracked here are sent back to the client to be tracked together with 
        * the existing client side metrics.
        * 
+ * + * optional .hbase.pb.ScanMetrics scan_metrics = 10; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder getScanMetricsOrBuilder() { if (scanMetricsBuilder_ != null) { return scanMetricsBuilder_.getMessageOrBuilder(); } else { - return scanMetrics_; + return scanMetrics_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance() : scanMetrics_; } } /** - * optional .hbase.pb.ScanMetrics scan_metrics = 10; - * *
        * This field is filled in if the client has requested that scan metrics be tracked.
        * The metrics tracked here are sent back to the client to be tracked together with 
        * the existing client side metrics.
        * 
+ * + * optional .hbase.pb.ScanMetrics scan_metrics = 10; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics, org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder> getScanMetricsFieldBuilder() { if (scanMetricsBuilder_ == null) { - scanMetricsBuilder_ = new com.google.protobuf.SingleFieldBuilder< + scanMetricsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics, org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder>( - scanMetrics_, + getScanMetrics(), getParentForChildren(), isClean()); scanMetrics_ = null; } return scanMetricsBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ScanResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.ScanResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse DEFAULT_INSTANCE; static { - defaultInstance = new ScanResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new 
com.google.protobuf.AbstractParser() { + public ScanResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ScanResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ScanResponse) } - public interface BulkLoadHFileRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface BulkLoadHFileRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.BulkLoadHFileRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionSpecifier region = 1; /** * required .hbase.pb.RegionSpecifier region = 1; */ @@ -21014,7 +21379,6 @@ public final class ClientProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - // repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2; /** * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2; */ @@ -21039,7 +21403,6 @@ public final class ClientProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( int index); - // optional bool assign_seq_num = 3; /** * optional bool assign_seq_num = 3; */ @@ -21049,7 +21412,6 @@ public final class ClientProtos { */ boolean getAssignSeqNum(); - // optional .hbase.pb.DelegationToken fs_token = 4; /** * optional .hbase.pb.DelegationToken fs_token = 4; */ @@ -21063,7 +21425,6 @@ public final class ClientProtos { */ 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationTokenOrBuilder getFsTokenOrBuilder(); - // optional string bulk_token = 5; /** * optional string bulk_token = 5; */ @@ -21078,7 +21439,6 @@ public final class ClientProtos { com.google.protobuf.ByteString getBulkTokenBytes(); - // optional bool copy_file = 6 [default = false]; /** * optional bool copy_file = 6 [default = false]; */ @@ -21089,44 +21449,39 @@ public final class ClientProtos { boolean getCopyFile(); } /** - * Protobuf type {@code hbase.pb.BulkLoadHFileRequest} - * *
    **
    * Atomically bulk load multiple HFiles (say from different column families)
    * into an open region.
    * 
+ * + * Protobuf type {@code hbase.pb.BulkLoadHFileRequest} */ - public static final class BulkLoadHFileRequest extends - com.google.protobuf.GeneratedMessage - implements BulkLoadHFileRequestOrBuilder { + public static final class BulkLoadHFileRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.BulkLoadHFileRequest) + BulkLoadHFileRequestOrBuilder { // Use BulkLoadHFileRequest.newBuilder() to construct. - private BulkLoadHFileRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private BulkLoadHFileRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private BulkLoadHFileRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final BulkLoadHFileRequest defaultInstance; - public static BulkLoadHFileRequest getDefaultInstance() { - return defaultInstance; - } - - public BulkLoadHFileRequest getDefaultInstanceForType() { - return defaultInstance; + private BulkLoadHFileRequest() { + familyPath_ = java.util.Collections.emptyList(); + assignSeqNum_ = false; + bulkToken_ = ""; + copyFile_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private BulkLoadHFileRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -21163,7 +21518,8 @@ public final class ClientProtos { familyPath_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } - 
familyPath_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.PARSER, extensionRegistry)); + familyPath_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.PARSER, extensionRegistry)); break; } case 24: { @@ -21185,8 +21541,9 @@ public final class ClientProtos { break; } case 42: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000008; - bulkToken_ = input.readBytes(); + bulkToken_ = bs; break; } case 48: { @@ -21200,7 +21557,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { familyPath_ = java.util.Collections.unmodifiableList(familyPath_); @@ -21214,32 +21571,17 @@ public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public BulkLoadHFileRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return new BulkLoadHFileRequest(input, extensionRegistry); - } - }; + public interface FamilyPathOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.BulkLoadHFileRequest.FamilyPath) + com.google.protobuf.MessageOrBuilder { - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public interface FamilyPathOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes family = 1; /** * required bytes family = 1; */ @@ -21249,7 +21591,6 @@ public final class ClientProtos { */ com.google.protobuf.ByteString getFamily(); - // required string path = 2; /** * required string path = 2; */ @@ -21267,36 +21608,29 @@ public final class ClientProtos { /** * Protobuf type {@code hbase.pb.BulkLoadHFileRequest.FamilyPath} */ - public static final class FamilyPath extends - com.google.protobuf.GeneratedMessage - implements FamilyPathOrBuilder { + public static final class FamilyPath extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.BulkLoadHFileRequest.FamilyPath) + FamilyPathOrBuilder { // Use FamilyPath.newBuilder() to construct. 
- private FamilyPath(com.google.protobuf.GeneratedMessage.Builder builder) { + private FamilyPath(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private FamilyPath(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final FamilyPath defaultInstance; - public static FamilyPath getDefaultInstance() { - return defaultInstance; - } - - public FamilyPath getDefaultInstanceForType() { - return defaultInstance; + private FamilyPath() { + family_ = com.google.protobuf.ByteString.EMPTY; + path_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private FamilyPath( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -21321,8 +21655,9 @@ public final class ClientProtos { break; } case 18: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; - path_ = input.readBytes(); + path_ = bs; break; } } @@ -21331,7 +21666,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -21342,30 +21677,14 @@ public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_descriptor; } - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public FamilyPath parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new FamilyPath(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bytes family = 1; public static final int FAMILY_FIELD_NUMBER = 1; private com.google.protobuf.ByteString family_; /** @@ -21381,9 +21700,8 @@ public final class ClientProtos { return family_; } - // required string path = 2; public static final int PATH_FIELD_NUMBER = 2; - private java.lang.Object path_; + private volatile java.lang.Object path_; /** * required string path = 2; */ @@ -21424,14 +21742,11 @@ public final class ClientProtos { } } - private void initFields() { - family_ = com.google.protobuf.ByteString.EMPTY; - path_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasFamily()) { memoizedIsInitialized = 0; @@ -21447,19 +21762,17 @@ public final class ClientProtos { public void 
writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, family_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getPathBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, path_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -21468,22 +21781,15 @@ public final class ClientProtos { .computeBytesSize(1, family_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getPathBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, path_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -21504,12 +21810,10 @@ public final class ClientProtos { result = result && getPath() .equals(other.getPath()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -21525,7 +21829,7 @@ public final class ClientProtos { hash = (37 * hash) + PATH_FIELD_NUMBER; hash = (53 * hash) + getPath().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + 
unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -21553,46 +21857,57 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -21600,14 +21915,15 @@ public final class ClientProtos { * Protobuf type {@code hbase.pb.BulkLoadHFileRequest.FamilyPath} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.BulkLoadHFileRequest.FamilyPath) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -21620,18 +21936,15 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); family_ = com.google.protobuf.ByteString.EMPTY; @@ -21641,10 +21954,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_descriptor; @@ -21679,6 +21988,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) 
super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath)other); @@ -21698,17 +22033,16 @@ public final class ClientProtos { path_ = other.path_; onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasFamily()) { - return false; } if (!hasPath()) { - return false; } return true; @@ -21723,7 +22057,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -21733,7 +22067,6 @@ public final class ClientProtos { } private int bitField0_; - // required bytes family = 1; private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes family = 1; @@ -21769,7 +22102,6 @@ public final class ClientProtos { return this; } - // required string path = 2; private java.lang.Object path_ = ""; /** * required string path = 2; @@ -21783,9 +22115,12 @@ public final class ClientProtos { public java.lang.String getPath() { 
java.lang.Object ref = path_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - path_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + path_ = s; + } return s; } else { return (java.lang.String) ref; @@ -21842,20 +22177,56 @@ public final class ClientProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.BulkLoadHFileRequest.FamilyPath) } + // @@protoc_insertion_point(class_scope:hbase.pb.BulkLoadHFileRequest.FamilyPath) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath DEFAULT_INSTANCE; static { - defaultInstance = new FamilyPath(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public FamilyPath parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FamilyPath(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser 
getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.BulkLoadHFileRequest.FamilyPath) } private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** @@ -21868,16 +22239,15 @@ public final class ClientProtos { * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } /** * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } - // repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2; public static final int FAMILY_PATH_FIELD_NUMBER = 2; private java.util.List familyPath_; /** @@ -21913,7 +22283,6 @@ public final class ClientProtos { return familyPath_.get(index); } - // optional bool assign_seq_num = 3; public static final int ASSIGN_SEQ_NUM_FIELD_NUMBER = 3; private boolean assignSeqNum_; /** @@ -21929,7 +22298,6 @@ public final class ClientProtos { return assignSeqNum_; } - // optional .hbase.pb.DelegationToken fs_token = 4; public static final int FS_TOKEN_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken fsToken_; /** @@ -21942,18 +22310,17 @@ public final class ClientProtos { * optional .hbase.pb.DelegationToken fs_token = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken getFsToken() { - return fsToken_; + return fsToken_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance() : fsToken_; } /** * optional .hbase.pb.DelegationToken fs_token = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationTokenOrBuilder getFsTokenOrBuilder() { - return fsToken_; + return fsToken_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance() : fsToken_; } - // optional string bulk_token = 5; public static final int BULK_TOKEN_FIELD_NUMBER = 5; - private java.lang.Object bulkToken_; + private volatile java.lang.Object bulkToken_; /** * optional string bulk_token = 5; */ @@ -21994,7 +22361,6 @@ public final class ClientProtos { } } - // optional bool copy_file = 6 [default = false]; public static final int COPY_FILE_FIELD_NUMBER = 6; private boolean copyFile_; /** @@ -22010,18 +22376,11 @@ public final class ClientProtos { return copyFile_; } - private void initFields() { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - familyPath_ = java.util.Collections.emptyList(); - assignSeqNum_ = false; - fsToken_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance(); - bulkToken_ = ""; - copyFile_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRegion()) { memoizedIsInitialized = 0; @@ -22043,9 +22402,8 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); + output.writeMessage(1, getRegion()); } for (int i = 0; i < familyPath_.size(); i++) { output.writeMessage(2, familyPath_.get(i)); @@ -22054,26 +22412,25 @@ public final class ClientProtos { output.writeBool(3, assignSeqNum_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeMessage(4, fsToken_); + output.writeMessage(4, getFsToken()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeBytes(5, getBulkTokenBytes()); 
+ com.google.protobuf.GeneratedMessageV3.writeString(output, 5, bulkToken_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBool(6, copyFile_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); + .computeMessageSize(1, getRegion()); } for (int i = 0; i < familyPath_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -22085,29 +22442,22 @@ public final class ClientProtos { } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(4, fsToken_); + .computeMessageSize(4, getFsToken()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(5, getBulkTokenBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, bulkToken_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(6, copyFile_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -22145,12 +22495,10 @@ public final class ClientProtos { result = result && (getCopyFile() == other.getCopyFile()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return 
result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -22168,7 +22516,8 @@ public final class ClientProtos { } if (hasAssignSeqNum()) { hash = (37 * hash) + ASSIGN_SEQ_NUM_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getAssignSeqNum()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getAssignSeqNum()); } if (hasFsToken()) { hash = (37 * hash) + FS_TOKEN_FIELD_NUMBER; @@ -22180,9 +22529,10 @@ public final class ClientProtos { } if (hasCopyFile()) { hash = (37 * hash) + COPY_FILE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getCopyFile()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getCopyFile()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -22210,67 +22560,79 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.BulkLoadHFileRequest} - * *
      **
      * Atomically bulk load multiple HFiles (say from different column families)
      * into an open region.
      * 
+ * + * Protobuf type {@code hbase.pb.BulkLoadHFileRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.BulkLoadHFileRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -22283,25 +22645,22 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionFieldBuilder(); getFamilyPathFieldBuilder(); getFsTokenFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; } else { regionBuilder_.clear(); } @@ -22315,7 +22674,7 @@ public final class ClientProtos { assignSeqNum_ = false; bitField0_ = (bitField0_ & 
~0x00000004); if (fsTokenBuilder_ == null) { - fsToken_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance(); + fsToken_ = null; } else { fsTokenBuilder_.clear(); } @@ -22327,10 +22686,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_descriptor; @@ -22394,6 +22749,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest)other); @@ -22427,7 +22808,7 @@ public final class ClientProtos { familyPath_ = other.familyPath_; bitField0_ = (bitField0_ & ~0x00000002); familyPathBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getFamilyPathFieldBuilder() : null; } else { familyPathBuilder_.addAllMessages(other.familyPath_); @@ -22448,22 +22829,20 @@ public final class ClientProtos { if (other.hasCopyFile()) { setCopyFile(other.getCopyFile()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasRegion()) { - return false; } if (!getRegion().isInitialized()) { - return false; } for (int i = 0; i < getFamilyPathCount(); i++) { if (!getFamilyPath(i).isInitialized()) { - return false; } } @@ -22479,7 +22858,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -22489,9 +22868,8 @@ public final class ClientProtos { } private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * required .hbase.pb.RegionSpecifier region = 1; @@ -22504,7 +22882,7 @@ public final class 
ClientProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } @@ -22545,6 +22923,7 @@ public final class ClientProtos { public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != null && region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); @@ -22563,7 +22942,7 @@ public final class ClientProtos { */ public Builder clearRegion() { if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; onChanged(); } else { regionBuilder_.clear(); @@ -22586,19 +22965,20 @@ public final class ClientProtos { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { - return region_; + return region_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } } /** * required .hbase.pb.RegionSpecifier region = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, + getRegion(), getParentForChildren(), isClean()); region_ = null; @@ -22606,7 +22986,6 @@ public final class ClientProtos { return regionBuilder_; } - // repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2; private java.util.List familyPath_ = java.util.Collections.emptyList(); private void ensureFamilyPathIsMutable() { @@ -22616,7 +22995,7 @@ public final class ClientProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> familyPathBuilder_; /** @@ -22748,7 +23127,8 @@ public final class ClientProtos { java.lang.Iterable values) { if (familyPathBuilder_ == null) { ensureFamilyPathIsMutable(); - super.addAll(values, familyPath_); 
+ com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, familyPath_); onChanged(); } else { familyPathBuilder_.addAllMessages(values); @@ -22831,11 +23211,11 @@ public final class ClientProtos { getFamilyPathBuilderList() { return getFamilyPathFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> getFamilyPathFieldBuilder() { if (familyPathBuilder_ == null) { - familyPathBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + familyPathBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>( familyPath_, ((bitField0_ & 0x00000002) == 0x00000002), @@ -22846,7 +23226,6 @@ public final class ClientProtos { return familyPathBuilder_; } - // optional bool assign_seq_num = 3; private boolean assignSeqNum_ ; /** * optional bool assign_seq_num = 3; @@ -22879,9 +23258,8 @@ public final class ClientProtos { return this; } - // optional .hbase.pb.DelegationToken fs_token = 4; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken fsToken_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken fsToken_ = null; + private 
com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationTokenOrBuilder> fsTokenBuilder_; /** * optional .hbase.pb.DelegationToken fs_token = 4; @@ -22894,7 +23272,7 @@ public final class ClientProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken getFsToken() { if (fsTokenBuilder_ == null) { - return fsToken_; + return fsToken_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance() : fsToken_; } else { return fsTokenBuilder_.getMessage(); } @@ -22935,6 +23313,7 @@ public final class ClientProtos { public Builder mergeFsToken(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken value) { if (fsTokenBuilder_ == null) { if (((bitField0_ & 0x00000008) == 0x00000008) && + fsToken_ != null && fsToken_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance()) { fsToken_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.newBuilder(fsToken_).mergeFrom(value).buildPartial(); @@ -22953,7 +23332,7 @@ public final class ClientProtos { */ public Builder clearFsToken() { if (fsTokenBuilder_ == null) { - fsToken_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance(); + fsToken_ = null; onChanged(); } else { fsTokenBuilder_.clear(); @@ -22976,19 +23355,20 @@ public final class ClientProtos { if (fsTokenBuilder_ != null) { return fsTokenBuilder_.getMessageOrBuilder(); } else { - return fsToken_; + return fsToken_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance() : fsToken_; } } /** * optional .hbase.pb.DelegationToken fs_token = 4; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationTokenOrBuilder> getFsTokenFieldBuilder() { if (fsTokenBuilder_ == null) { - fsTokenBuilder_ = new com.google.protobuf.SingleFieldBuilder< + fsTokenBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationTokenOrBuilder>( - fsToken_, + getFsToken(), getParentForChildren(), isClean()); fsToken_ = null; @@ -22996,7 +23376,6 @@ public final class ClientProtos { return fsTokenBuilder_; } - // optional string bulk_token = 5; private java.lang.Object bulkToken_ = ""; /** * optional string bulk_token = 5; @@ -23010,9 +23389,12 @@ public final class ClientProtos { public java.lang.String getBulkToken() { java.lang.Object ref = bulkToken_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - bulkToken_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + bulkToken_ = s; + } return s; } else { return (java.lang.String) ref; @@ -23070,7 +23452,6 @@ public final class ClientProtos { return this; } - // optional bool copy_file = 6 [default = false]; private boolean copyFile_ ; /** * optional bool copy_file = 6 [default = false]; @@ -23102,22 +23483,59 @@ public 
final class ClientProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.BulkLoadHFileRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.BulkLoadHFileRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest DEFAULT_INSTANCE; static { - defaultInstance = new BulkLoadHFileRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public BulkLoadHFileRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new BulkLoadHFileRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.BulkLoadHFileRequest) } - public interface BulkLoadHFileResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface BulkLoadHFileResponseOrBuilder extends + // 
@@protoc_insertion_point(interface_extends:hbase.pb.BulkLoadHFileResponse) + com.google.protobuf.MessageOrBuilder { - // required bool loaded = 1; /** * required bool loaded = 1; */ @@ -23130,36 +23548,28 @@ public final class ClientProtos { /** * Protobuf type {@code hbase.pb.BulkLoadHFileResponse} */ - public static final class BulkLoadHFileResponse extends - com.google.protobuf.GeneratedMessage - implements BulkLoadHFileResponseOrBuilder { + public static final class BulkLoadHFileResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.BulkLoadHFileResponse) + BulkLoadHFileResponseOrBuilder { // Use BulkLoadHFileResponse.newBuilder() to construct. - private BulkLoadHFileResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private BulkLoadHFileResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private BulkLoadHFileResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final BulkLoadHFileResponse defaultInstance; - public static BulkLoadHFileResponse getDefaultInstance() { - return defaultInstance; } - - public BulkLoadHFileResponse getDefaultInstanceForType() { - return defaultInstance; + private BulkLoadHFileResponse() { + loaded_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private BulkLoadHFileResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -23189,7 +23599,7 
@@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -23200,30 +23610,14 @@ public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public BulkLoadHFileResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new BulkLoadHFileResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bool loaded = 1; public static final int LOADED_FIELD_NUMBER = 1; private boolean loaded_; /** @@ -23239,13 +23633,11 @@ public final class ClientProtos { return loaded_; } - private void initFields() { - loaded_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if 
(isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasLoaded()) { memoizedIsInitialized = 0; @@ -23257,16 +23649,14 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, loaded_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -23274,19 +23664,13 @@ public final class ClientProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, loaded_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -23302,12 +23686,10 @@ public final class ClientProtos { result = result && (getLoaded() == other.getLoaded()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -23317,9 +23699,10 @@ public final class ClientProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasLoaded()) { hash = (37 * hash) + LOADED_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getLoaded()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getLoaded()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) 
+ unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -23347,46 +23730,57 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( com.google.protobuf.CodedInputStream 
input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -23394,14 +23788,15 @@ public final class ClientProtos { * Protobuf type {@code hbase.pb.BulkLoadHFileResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.BulkLoadHFileResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -23414,18 +23809,15 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); loaded_ = false; @@ -23433,10 +23825,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileResponse_descriptor; @@ -23467,6 +23855,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + 
com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse)other); @@ -23481,13 +23895,13 @@ public final class ClientProtos { if (other.hasLoaded()) { setLoaded(other.getLoaded()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasLoaded()) { - return false; } return true; @@ -23502,7 +23916,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -23512,7 +23926,6 @@ public final class ClientProtos { } private int bitField0_; - // required bool loaded = 1; private boolean loaded_ ; /** * required bool loaded = 1; @@ -23544,22 +23957,59 @@ public final class ClientProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.BulkLoadHFileResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.BulkLoadHFileResponse) + private static final 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse DEFAULT_INSTANCE; static { - defaultInstance = new BulkLoadHFileResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public BulkLoadHFileResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new BulkLoadHFileResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.BulkLoadHFileResponse) } - public interface DelegationTokenOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface DelegationTokenOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.DelegationToken) + com.google.protobuf.MessageOrBuilder { - // optional bytes identifier = 1; /** * optional bytes identifier = 1; */ @@ -23569,7 +24019,6 @@ public final class ClientProtos { */ com.google.protobuf.ByteString getIdentifier(); - // optional bytes password = 2; /** * optional bytes password = 2; */ @@ -23579,7 +24028,6 @@ public final class ClientProtos { */ com.google.protobuf.ByteString getPassword(); - // optional string kind = 3; /** * optional string kind = 3; */ @@ 
-23594,7 +24042,6 @@ public final class ClientProtos { com.google.protobuf.ByteString getKindBytes(); - // optional string service = 4; /** * optional string service = 4; */ @@ -23612,36 +24059,31 @@ public final class ClientProtos { /** * Protobuf type {@code hbase.pb.DelegationToken} */ - public static final class DelegationToken extends - com.google.protobuf.GeneratedMessage - implements DelegationTokenOrBuilder { + public static final class DelegationToken extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.DelegationToken) + DelegationTokenOrBuilder { // Use DelegationToken.newBuilder() to construct. - private DelegationToken(com.google.protobuf.GeneratedMessage.Builder builder) { + private DelegationToken(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private DelegationToken(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final DelegationToken defaultInstance; - public static DelegationToken getDefaultInstance() { - return defaultInstance; } - - public DelegationToken getDefaultInstanceForType() { - return defaultInstance; + private DelegationToken() { + identifier_ = com.google.protobuf.ByteString.EMPTY; + password_ = com.google.protobuf.ByteString.EMPTY; + kind_ = ""; + service_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private DelegationToken( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); 
@@ -23671,13 +24113,15 @@ public final class ClientProtos { break; } case 26: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000004; - kind_ = input.readBytes(); + kind_ = bs; break; } case 34: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000008; - service_ = input.readBytes(); + service_ = bs; break; } } @@ -23686,7 +24130,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -23697,30 +24141,14 @@ public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_DelegationToken_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_DelegationToken_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public DelegationToken parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new DelegationToken(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional bytes identifier = 1; public static final int 
IDENTIFIER_FIELD_NUMBER = 1; private com.google.protobuf.ByteString identifier_; /** @@ -23736,7 +24164,6 @@ public final class ClientProtos { return identifier_; } - // optional bytes password = 2; public static final int PASSWORD_FIELD_NUMBER = 2; private com.google.protobuf.ByteString password_; /** @@ -23752,9 +24179,8 @@ public final class ClientProtos { return password_; } - // optional string kind = 3; public static final int KIND_FIELD_NUMBER = 3; - private java.lang.Object kind_; + private volatile java.lang.Object kind_; /** * optional string kind = 3; */ @@ -23795,9 +24221,8 @@ public final class ClientProtos { } } - // optional string service = 4; public static final int SERVICE_FIELD_NUMBER = 4; - private java.lang.Object service_; + private volatile java.lang.Object service_; /** * optional string service = 4; */ @@ -23838,16 +24263,11 @@ public final class ClientProtos { } } - private void initFields() { - identifier_ = com.google.protobuf.ByteString.EMPTY; - password_ = com.google.protobuf.ByteString.EMPTY; - kind_ = ""; - service_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -23855,7 +24275,6 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, identifier_); } @@ -23863,17 +24282,16 @@ public final class ClientProtos { output.writeBytes(2, password_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, getKindBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, kind_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeBytes(4, getServiceBytes()); + 
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, service_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -23886,26 +24304,18 @@ public final class ClientProtos { .computeBytesSize(2, password_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, getKindBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, kind_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(4, getServiceBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, service_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -23936,12 +24346,10 @@ public final class ClientProtos { result = result && getService() .equals(other.getService()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -23965,7 +24373,7 @@ public final class ClientProtos { hash = (37 * hash) + SERVICE_FIELD_NUMBER; hash = (53 * hash) + getService().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return 
hash; } @@ -23993,46 +24401,57 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - 
return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -24040,14 +24459,15 @@ public final class ClientProtos { * Protobuf type {@code hbase.pb.DelegationToken} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationTokenOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.DelegationToken) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationTokenOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_DelegationToken_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_DelegationToken_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -24060,18 +24480,15 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); identifier_ = com.google.protobuf.ByteString.EMPTY; @@ -24085,10 +24502,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_DelegationToken_descriptor; @@ -24131,6 +24544,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return 
(Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken)other); @@ -24158,7 +24597,8 @@ public final class ClientProtos { service_ = other.service_; onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -24175,7 +24615,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -24185,7 +24625,6 @@ public final class ClientProtos { } private int bitField0_; - // optional bytes identifier = 1; private com.google.protobuf.ByteString identifier_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes identifier = 1; @@ -24221,7 +24660,6 @@ public final class ClientProtos { return this; } - // optional bytes password = 2; private com.google.protobuf.ByteString password_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes password = 2; @@ -24257,7 +24695,6 @@ public final class ClientProtos { return this; } - // optional string kind = 3; private java.lang.Object kind_ = ""; /** * optional string kind = 3; @@ -24271,9 +24708,12 @@ public final class ClientProtos { public java.lang.String getKind() { java.lang.Object ref = kind_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - kind_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = 
bs.toStringUtf8(); + if (bs.isValidUtf8()) { + kind_ = s; + } return s; } else { return (java.lang.String) ref; @@ -24331,7 +24771,6 @@ public final class ClientProtos { return this; } - // optional string service = 4; private java.lang.Object service_ = ""; /** * optional string service = 4; @@ -24345,9 +24784,12 @@ public final class ClientProtos { public java.lang.String getService() { java.lang.Object ref = service_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - service_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + service_ = s; + } return s; } else { return (java.lang.String) ref; @@ -24404,22 +24846,59 @@ public final class ClientProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.DelegationToken) } + // @@protoc_insertion_point(class_scope:hbase.pb.DelegationToken) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken DEFAULT_INSTANCE; static { - defaultInstance = new DelegationToken(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public DelegationToken parsePartialFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DelegationToken(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.DelegationToken) } - public interface PrepareBulkLoadRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface PrepareBulkLoadRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.PrepareBulkLoadRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.TableName table_name = 1; /** * required .hbase.pb.TableName table_name = 1; */ @@ -24433,7 +24912,6 @@ public final class ClientProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); - // optional .hbase.pb.RegionSpecifier region = 2; /** * optional .hbase.pb.RegionSpecifier region = 2; */ @@ -24450,36 +24928,27 @@ public final class ClientProtos { /** * Protobuf type {@code hbase.pb.PrepareBulkLoadRequest} */ - public static final class PrepareBulkLoadRequest extends - com.google.protobuf.GeneratedMessage - implements PrepareBulkLoadRequestOrBuilder { + public static final class PrepareBulkLoadRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.PrepareBulkLoadRequest) + PrepareBulkLoadRequestOrBuilder { // Use PrepareBulkLoadRequest.newBuilder() to construct. 
- private PrepareBulkLoadRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private PrepareBulkLoadRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private PrepareBulkLoadRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final PrepareBulkLoadRequest defaultInstance; - public static PrepareBulkLoadRequest getDefaultInstance() { - return defaultInstance; } - - public PrepareBulkLoadRequest getDefaultInstanceForType() { - return defaultInstance; + private PrepareBulkLoadRequest() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private PrepareBulkLoadRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -24530,7 +24999,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -24541,30 +25010,14 @@ public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public PrepareBulkLoadRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new PrepareBulkLoadRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.TableName table_name = 1; public static final int TABLE_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** @@ -24577,16 +25030,15 @@ public final class ClientProtos { * required .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * required .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { - return tableName_; + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } - // optional .hbase.pb.RegionSpecifier region = 2; public static final int REGION_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** @@ -24599,23 +25051,20 @@ public final class ClientProtos { * optional .hbase.pb.RegionSpecifier region = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } /** * optional .hbase.pb.RegionSpecifier region = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } - private void initFields() { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasTableName()) { memoizedIsInitialized = 0; @@ -24637,43 +25086,35 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, tableName_); + output.writeMessage(1, getTableName()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, region_); + output.writeMessage(2, 
getRegion()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, tableName_); + .computeMessageSize(1, getTableName()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, region_); + .computeMessageSize(2, getRegion()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -24694,12 +25135,10 @@ public final class ClientProtos { result = result && getRegion() .equals(other.getRegion()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -24715,7 +25154,7 @@ public final class ClientProtos { hash = (37 * hash) + REGION_FIELD_NUMBER; hash = (53 * hash) + getRegion().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -24743,46 +25182,57 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return 
PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return 
Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -24790,14 +25240,15 @@ public final class ClientProtos { * Protobuf type {@code hbase.pb.PrepareBulkLoadRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.PrepareBulkLoadRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -24810,30 +25261,27 @@ public final class ClientProtos { } 
private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTableNameFieldBuilder(); getRegionFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; } else { regionBuilder_.clear(); } @@ -24841,10 +25289,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor; @@ -24887,6 +25331,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor 
field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest)other); @@ -24904,22 +25374,20 @@ public final class ClientProtos { if (other.hasRegion()) { mergeRegion(other.getRegion()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasTableName()) { - return false; } if (!getTableName().isInitialized()) { - return false; } if (hasRegion()) { if (!getRegion().isInitialized()) { - return false; } } @@ -24935,7 +25403,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -24945,9 +25413,8 @@ public final class ClientProtos { } private int bitField0_; - // required .hbase.pb.TableName table_name = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * required .hbase.pb.TableName table_name = 1; @@ -24960,7 +25427,7 @@ public final class ClientProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } @@ -25001,6 +25468,7 @@ public final class ClientProtos { public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); @@ -25019,7 +25487,7 @@ public final class ClientProtos { */ public Builder clearTableName() { if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); @@ -25042,19 +25510,20 @@ public final class ClientProtos { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { - return tableName_; + return tableName_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * required .hbase.pb.TableName table_name = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { - tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( - tableName_, + getTableName(), getParentForChildren(), isClean()); tableName_ = null; @@ -25062,9 +25531,8 @@ public final class ClientProtos { return tableNameBuilder_; } - // optional .hbase.pb.RegionSpecifier region = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * optional .hbase.pb.RegionSpecifier region = 2; @@ -25077,7 +25545,7 @@ public final class ClientProtos { */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } @@ -25118,6 +25586,7 @@ public final class ClientProtos { public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + region_ != null && region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); @@ -25136,7 +25605,7 @@ public final class ClientProtos { */ public Builder clearRegion() { if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; onChanged(); } else { regionBuilder_.clear(); @@ -25159,41 +25628,79 @@ public final class ClientProtos { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { - return region_; + return region_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } } /** * optional .hbase.pb.RegionSpecifier region = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, + getRegion(), getParentForChildren(), isClean()); region_ = null; } return regionBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.PrepareBulkLoadRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.PrepareBulkLoadRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest DEFAULT_INSTANCE; static { - defaultInstance = new PrepareBulkLoadRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest 
getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public PrepareBulkLoadRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new PrepareBulkLoadRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.PrepareBulkLoadRequest) } - public interface PrepareBulkLoadResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface PrepareBulkLoadResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.PrepareBulkLoadResponse) + com.google.protobuf.MessageOrBuilder { - // required string bulk_token = 1; /** * required string bulk_token = 1; */ @@ -25211,36 +25718,28 @@ public final class ClientProtos { /** * Protobuf type {@code hbase.pb.PrepareBulkLoadResponse} */ - public static final class PrepareBulkLoadResponse extends - com.google.protobuf.GeneratedMessage - implements PrepareBulkLoadResponseOrBuilder { + public static final class PrepareBulkLoadResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.PrepareBulkLoadResponse) + PrepareBulkLoadResponseOrBuilder { // Use PrepareBulkLoadResponse.newBuilder() to construct. 
- private PrepareBulkLoadResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private PrepareBulkLoadResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private PrepareBulkLoadResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final PrepareBulkLoadResponse defaultInstance; - public static PrepareBulkLoadResponse getDefaultInstance() { - return defaultInstance; } - - public PrepareBulkLoadResponse getDefaultInstanceForType() { - return defaultInstance; + private PrepareBulkLoadResponse() { + bulkToken_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private PrepareBulkLoadResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -25260,8 +25759,9 @@ public final class ClientProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - bulkToken_ = input.readBytes(); + bulkToken_ = bs; break; } } @@ -25270,7 +25770,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -25281,32 +25781,16 @@ public final class ClientProtos { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public PrepareBulkLoadResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new PrepareBulkLoadResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string bulk_token = 1; public static final int BULK_TOKEN_FIELD_NUMBER = 1; - private java.lang.Object bulkToken_; + private volatile java.lang.Object bulkToken_; /** * required string bulk_token = 1; */ @@ -25347,13 +25831,11 @@ public final class ClientProtos { } } - private void initFields() { - bulkToken_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasBulkToken()) { memoizedIsInitialized = 0; @@ -25365,36 +25847,27 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - 
getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getBulkTokenBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, bulkToken_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getBulkTokenBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, bulkToken_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -25410,12 +25883,10 @@ public final class ClientProtos { result = result && getBulkToken() .equals(other.getBulkToken()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -25427,7 +25898,7 @@ public final class ClientProtos { hash = (37 * hash) + BULK_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getBulkToken().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -25455,46 +25926,57 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse 
parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -25502,14 +25984,15 @@ public final class ClientProtos { * Protobuf type {@code hbase.pb.PrepareBulkLoadResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.PrepareBulkLoadResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -25522,18 +26005,15 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); bulkToken_ = ""; @@ -25541,10 +26021,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor; @@ -25575,6 +26051,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) 
super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse)other); @@ -25591,13 +26093,13 @@ public final class ClientProtos { bulkToken_ = other.bulkToken_; onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasBulkToken()) { - return false; } return true; @@ -25612,7 +26114,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -25622,7 +26124,6 @@ public final class ClientProtos { } private int bitField0_; - // required string bulk_token = 1; private java.lang.Object bulkToken_ = ""; /** * required string bulk_token = 1; @@ -25636,9 +26137,12 @@ public final class ClientProtos { public java.lang.String getBulkToken() { java.lang.Object ref = bulkToken_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - bulkToken_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + bulkToken_ = s; + } return s; } else { return (java.lang.String) ref; @@ -25695,22 +26199,59 @@ public final class ClientProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return 
super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.PrepareBulkLoadResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.PrepareBulkLoadResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse DEFAULT_INSTANCE; static { - defaultInstance = new PrepareBulkLoadResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public PrepareBulkLoadResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new PrepareBulkLoadResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.PrepareBulkLoadResponse) } - public interface CleanupBulkLoadRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface CleanupBulkLoadRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.CleanupBulkLoadRequest) + com.google.protobuf.MessageOrBuilder { - // required string 
bulk_token = 1; /** * required string bulk_token = 1; */ @@ -25725,7 +26266,6 @@ public final class ClientProtos { com.google.protobuf.ByteString getBulkTokenBytes(); - // optional .hbase.pb.RegionSpecifier region = 2; /** * optional .hbase.pb.RegionSpecifier region = 2; */ @@ -25742,36 +26282,28 @@ public final class ClientProtos { /** * Protobuf type {@code hbase.pb.CleanupBulkLoadRequest} */ - public static final class CleanupBulkLoadRequest extends - com.google.protobuf.GeneratedMessage - implements CleanupBulkLoadRequestOrBuilder { + public static final class CleanupBulkLoadRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.CleanupBulkLoadRequest) + CleanupBulkLoadRequestOrBuilder { // Use CleanupBulkLoadRequest.newBuilder() to construct. - private CleanupBulkLoadRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private CleanupBulkLoadRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private CleanupBulkLoadRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final CleanupBulkLoadRequest defaultInstance; - public static CleanupBulkLoadRequest getDefaultInstance() { - return defaultInstance; - } - - public CleanupBulkLoadRequest getDefaultInstanceForType() { - return defaultInstance; + private CleanupBulkLoadRequest() { + bulkToken_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private CleanupBulkLoadRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; 
com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -25791,8 +26323,9 @@ public final class ClientProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - bulkToken_ = input.readBytes(); + bulkToken_ = bs; break; } case 18: { @@ -25814,7 +26347,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -25825,32 +26358,16 @@ public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CleanupBulkLoadRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CleanupBulkLoadRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string bulk_token = 1; public static 
final int BULK_TOKEN_FIELD_NUMBER = 1; - private java.lang.Object bulkToken_; + private volatile java.lang.Object bulkToken_; /** * required string bulk_token = 1; */ @@ -25891,7 +26408,6 @@ public final class ClientProtos { } } - // optional .hbase.pb.RegionSpecifier region = 2; public static final int REGION_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** @@ -25904,23 +26420,20 @@ public final class ClientProtos { * optional .hbase.pb.RegionSpecifier region = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } /** * optional .hbase.pb.RegionSpecifier region = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } - private void initFields() { - bulkToken_ = ""; - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasBulkToken()) { memoizedIsInitialized = 0; @@ -25938,43 +26451,34 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getBulkTokenBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, bulkToken_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, region_); + output.writeMessage(2, getRegion()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getBulkTokenBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, bulkToken_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, region_); + .computeMessageSize(2, getRegion()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws 
java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -25995,12 +26499,10 @@ public final class ClientProtos { result = result && getRegion() .equals(other.getRegion()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -26016,7 +26518,7 @@ public final class ClientProtos { hash = (37 * hash) + REGION_FIELD_NUMBER; hash = (53 * hash) + getRegion().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -26044,46 +26546,57 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -26091,14 +26604,15 @@ public final class ClientProtos { * Protobuf type {@code hbase.pb.CleanupBulkLoadRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.CleanupBulkLoadRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -26111,25 +26625,22 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionFieldBuilder(); } } - private static Builder create() { - return new 
Builder(); - } - public Builder clear() { super.clear(); bulkToken_ = ""; bitField0_ = (bitField0_ & ~0x00000001); if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; } else { regionBuilder_.clear(); } @@ -26137,10 +26648,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor; @@ -26179,6 +26686,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest)other); @@ -26198,18 +26731,17 @@ public final class ClientProtos { if (other.hasRegion()) { mergeRegion(other.getRegion()); } - 
this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasBulkToken()) { - return false; } if (hasRegion()) { if (!getRegion().isInitialized()) { - return false; } } @@ -26225,7 +26757,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -26235,7 +26767,6 @@ public final class ClientProtos { } private int bitField0_; - // required string bulk_token = 1; private java.lang.Object bulkToken_ = ""; /** * required string bulk_token = 1; @@ -26249,9 +26780,12 @@ public final class ClientProtos { public java.lang.String getBulkToken() { java.lang.Object ref = bulkToken_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - bulkToken_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + bulkToken_ = s; + } return s; } else { return (java.lang.String) ref; @@ -26309,9 +26843,8 @@ public final class ClientProtos { return this; } - // optional .hbase.pb.RegionSpecifier region = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = null; + private com.google.protobuf.SingleFieldBuilderV3< 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * optional .hbase.pb.RegionSpecifier region = 2; @@ -26324,7 +26857,7 @@ public final class ClientProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } @@ -26365,6 +26898,7 @@ public final class ClientProtos { public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + region_ != null && region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); @@ -26383,7 +26917,7 @@ public final class ClientProtos { */ public Builder clearRegion() { if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; onChanged(); } else { regionBuilder_.clear(); @@ -26406,73 +26940,103 @@ public final class ClientProtos { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { - return region_; + return region_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } } /** * optional .hbase.pb.RegionSpecifier region = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, + getRegion(), getParentForChildren(), isClean()); region_ = null; } return regionBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.CleanupBulkLoadRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.CleanupBulkLoadRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest DEFAULT_INSTANCE; static { - defaultInstance = new CleanupBulkLoadRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest 
getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public CleanupBulkLoadRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CleanupBulkLoadRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.CleanupBulkLoadRequest) } - public interface CleanupBulkLoadResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface CleanupBulkLoadResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.CleanupBulkLoadResponse) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.CleanupBulkLoadResponse} */ - public static final class CleanupBulkLoadResponse extends - com.google.protobuf.GeneratedMessage - implements CleanupBulkLoadResponseOrBuilder { + public static final class CleanupBulkLoadResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.CleanupBulkLoadResponse) + CleanupBulkLoadResponseOrBuilder { // Use CleanupBulkLoadResponse.newBuilder() to construct. 
- private CleanupBulkLoadResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private CleanupBulkLoadResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private CleanupBulkLoadResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final CleanupBulkLoadResponse defaultInstance; - public static CleanupBulkLoadResponse getDefaultInstance() { - return defaultInstance; - } - - public CleanupBulkLoadResponse getDefaultInstanceForType() { - return defaultInstance; + private CleanupBulkLoadResponse() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private CleanupBulkLoadResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -26496,7 +27060,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -26507,34 +27071,18 @@ public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CleanupBulkLoadResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CleanupBulkLoadResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -26542,29 +27090,21 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override 
public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -26575,12 +27115,10 @@ public final class ClientProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -26588,7 +27126,7 @@ public final class ClientProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -26616,46 +27154,57 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -26663,14 +27212,15 @@ public final class ClientProtos { * Protobuf type {@code hbase.pb.CleanupBulkLoadResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.CleanupBulkLoadResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -26683,27 +27233,20 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public 
Builder clear() { super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor; @@ -26727,6 +27270,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse)other); @@ -26738,7 +27307,8 @@ public final class ClientProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ 
-26755,7 +27325,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -26763,22 +27333,59 @@ public final class ClientProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.CleanupBulkLoadResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.CleanupBulkLoadResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse DEFAULT_INSTANCE; static { - defaultInstance = new CleanupBulkLoadResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public CleanupBulkLoadResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CleanupBulkLoadResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override 
+ public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.CleanupBulkLoadResponse) } - public interface CoprocessorServiceCallOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface CoprocessorServiceCallOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.CoprocessorServiceCall) + com.google.protobuf.MessageOrBuilder { - // required bytes row = 1; /** * required bytes row = 1; */ @@ -26788,7 +27395,6 @@ public final class ClientProtos { */ com.google.protobuf.ByteString getRow(); - // required string service_name = 2; /** * required string service_name = 2; */ @@ -26803,7 +27409,6 @@ public final class ClientProtos { com.google.protobuf.ByteString getServiceNameBytes(); - // required string method_name = 3; /** * required string method_name = 3; */ @@ -26818,7 +27423,6 @@ public final class ClientProtos { com.google.protobuf.ByteString getMethodNameBytes(); - // required bytes request = 4; /** * required bytes request = 4; */ @@ -26831,36 +27435,31 @@ public final class ClientProtos { /** * Protobuf type {@code hbase.pb.CoprocessorServiceCall} */ - public static final class CoprocessorServiceCall extends - com.google.protobuf.GeneratedMessage - implements CoprocessorServiceCallOrBuilder { + public static final class CoprocessorServiceCall extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.CoprocessorServiceCall) + CoprocessorServiceCallOrBuilder { // Use CoprocessorServiceCall.newBuilder() to construct. 
- private CoprocessorServiceCall(com.google.protobuf.GeneratedMessage.Builder builder) { + private CoprocessorServiceCall(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private CoprocessorServiceCall(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final CoprocessorServiceCall defaultInstance; - public static CoprocessorServiceCall getDefaultInstance() { - return defaultInstance; - } - - public CoprocessorServiceCall getDefaultInstanceForType() { - return defaultInstance; + private CoprocessorServiceCall() { + row_ = com.google.protobuf.ByteString.EMPTY; + serviceName_ = ""; + methodName_ = ""; + request_ = com.google.protobuf.ByteString.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private CoprocessorServiceCall( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -26885,13 +27484,15 @@ public final class ClientProtos { break; } case 18: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; - serviceName_ = input.readBytes(); + serviceName_ = bs; break; } case 26: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000004; - methodName_ = input.readBytes(); + methodName_ = bs; break; } case 34: { @@ -26905,7 +27506,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - 
e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -26916,30 +27517,14 @@ public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceCall_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceCall_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CoprocessorServiceCall parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CoprocessorServiceCall(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bytes row = 1; public static final int ROW_FIELD_NUMBER = 1; private com.google.protobuf.ByteString row_; /** @@ -26955,9 +27540,8 @@ public final class ClientProtos { return row_; } - // required string service_name = 2; public static final int SERVICE_NAME_FIELD_NUMBER = 2; - private java.lang.Object serviceName_; + private volatile java.lang.Object serviceName_; /** * required string service_name = 2; */ @@ -26998,9 +27582,8 @@ public final class ClientProtos { } } - // required string method_name = 3; public static final int METHOD_NAME_FIELD_NUMBER = 3; - private 
java.lang.Object methodName_; + private volatile java.lang.Object methodName_; /** * required string method_name = 3; */ @@ -27041,7 +27624,6 @@ public final class ClientProtos { } } - // required bytes request = 4; public static final int REQUEST_FIELD_NUMBER = 4; private com.google.protobuf.ByteString request_; /** @@ -27057,16 +27639,11 @@ public final class ClientProtos { return request_; } - private void initFields() { - row_ = com.google.protobuf.ByteString.EMPTY; - serviceName_ = ""; - methodName_ = ""; - request_ = com.google.protobuf.ByteString.EMPTY; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRow()) { memoizedIsInitialized = 0; @@ -27090,25 +27667,23 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, row_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getServiceNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, serviceName_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, getMethodNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, methodName_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeBytes(4, request_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -27117,30 +27692,22 @@ public final class ClientProtos { .computeBytesSize(1, row_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += 
com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getServiceNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, serviceName_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, getMethodNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, methodName_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(4, request_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -27171,12 +27738,10 @@ public final class ClientProtos { result = result && getRequest() .equals(other.getRequest()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -27200,7 +27765,7 @@ public final class ClientProtos { hash = (37 * hash) + REQUEST_FIELD_NUMBER; hash = (53 * hash) + getRequest().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -27228,46 +27793,57 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return 
newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -27275,14 +27851,15 @@ public final class ClientProtos { * Protobuf type {@code hbase.pb.CoprocessorServiceCall} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.CoprocessorServiceCall) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceCall_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceCall_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -27295,18 +27872,15 @@ public final class ClientProtos { } private Builder( - 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); row_ = com.google.protobuf.ByteString.EMPTY; @@ -27320,10 +27894,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceCall_descriptor; @@ -27366,6 +27936,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall) { return 
mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall)other); @@ -27393,25 +27989,22 @@ public final class ClientProtos { if (other.hasRequest()) { setRequest(other.getRequest()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasRow()) { - return false; } if (!hasServiceName()) { - return false; } if (!hasMethodName()) { - return false; } if (!hasRequest()) { - return false; } return true; @@ -27426,7 +28019,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -27436,7 +28029,6 @@ public final class ClientProtos { } private int bitField0_; - // required bytes row = 1; private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes row = 1; @@ -27472,7 +28064,6 @@ public final class ClientProtos { return this; } - // required string service_name = 2; private java.lang.Object serviceName_ = ""; /** * required string service_name = 2; @@ -27486,9 +28077,12 @@ public final class ClientProtos { public java.lang.String getServiceName() { java.lang.Object ref = serviceName_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - serviceName_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + serviceName_ = s; + } return s; } else { return (java.lang.String) ref; @@ -27546,7 +28140,6 @@ public final class ClientProtos { return this; } - // 
required string method_name = 3; private java.lang.Object methodName_ = ""; /** * required string method_name = 3; @@ -27560,9 +28153,12 @@ public final class ClientProtos { public java.lang.String getMethodName() { java.lang.Object ref = methodName_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - methodName_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + methodName_ = s; + } return s; } else { return (java.lang.String) ref; @@ -27620,7 +28216,6 @@ public final class ClientProtos { return this; } - // required bytes request = 4; private com.google.protobuf.ByteString request_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes request = 4; @@ -27655,22 +28250,59 @@ public final class ClientProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.CoprocessorServiceCall) } + // @@protoc_insertion_point(class_scope:hbase.pb.CoprocessorServiceCall) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall DEFAULT_INSTANCE; static { - defaultInstance = new CoprocessorServiceCall(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new 
com.google.protobuf.AbstractParser() { + public CoprocessorServiceCall parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CoprocessorServiceCall(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.CoprocessorServiceCall) } - public interface CoprocessorServiceResultOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface CoprocessorServiceResultOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.CoprocessorServiceResult) + com.google.protobuf.MessageOrBuilder { - // optional .hbase.pb.NameBytesPair value = 1; /** * optional .hbase.pb.NameBytesPair value = 1; */ @@ -27687,36 +28319,27 @@ public final class ClientProtos { /** * Protobuf type {@code hbase.pb.CoprocessorServiceResult} */ - public static final class CoprocessorServiceResult extends - com.google.protobuf.GeneratedMessage - implements CoprocessorServiceResultOrBuilder { + public static final class CoprocessorServiceResult extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.CoprocessorServiceResult) + CoprocessorServiceResultOrBuilder { // Use CoprocessorServiceResult.newBuilder() to construct. 
- private CoprocessorServiceResult(com.google.protobuf.GeneratedMessage.Builder builder) { + private CoprocessorServiceResult(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private CoprocessorServiceResult(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final CoprocessorServiceResult defaultInstance; - public static CoprocessorServiceResult getDefaultInstance() { - return defaultInstance; - } - - public CoprocessorServiceResult getDefaultInstanceForType() { - return defaultInstance; + private CoprocessorServiceResult() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private CoprocessorServiceResult( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -27754,7 +28377,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -27765,30 +28388,14 @@ public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResult_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResult_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CoprocessorServiceResult parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CoprocessorServiceResult(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional .hbase.pb.NameBytesPair value = 1; public static final int VALUE_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value_; /** @@ -27801,22 +28408,20 @@ public final class ClientProtos { * optional .hbase.pb.NameBytesPair value = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getValue() { - return value_; + return value_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : value_; } /** * optional .hbase.pb.NameBytesPair value = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() { - return value_; + return value_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : value_; } - private void initFields() { - value_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (hasValue()) { if (!getValue().isInitialized()) { @@ -27830,36 +28435,28 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, value_); + output.writeMessage(1, getValue()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, value_); + .computeMessageSize(1, getValue()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -27875,12 +28472,10 @@ public final class ClientProtos { result = result && getValue() .equals(other.getValue()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && 
unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -27892,7 +28487,7 @@ public final class ClientProtos { hash = (37 * hash) + VALUE_FIELD_NUMBER; hash = (53 * hash) + getValue().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -27920,46 +28515,57 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -27967,14 +28573,15 @@ public final class ClientProtos { * Protobuf type {@code hbase.pb.CoprocessorServiceResult} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.CoprocessorServiceResult) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResult_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResult_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -27987,23 +28594,20 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getValueFieldBuilder(); } } - private static Builder create() { - return 
new Builder(); - } - public Builder clear() { super.clear(); if (valueBuilder_ == null) { - value_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + value_ = null; } else { valueBuilder_.clear(); } @@ -28011,10 +28615,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResult_descriptor; @@ -28049,6 +28649,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult)other); @@ -28063,14 +28689,14 @@ public final class ClientProtos { if (other.hasValue()) { mergeValue(other.getValue()); } - this.mergeUnknownFields(other.getUnknownFields()); + 
this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (hasValue()) { if (!getValue().isInitialized()) { - return false; } } @@ -28086,7 +28712,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -28096,9 +28722,8 @@ public final class ClientProtos { } private int bitField0_; - // optional .hbase.pb.NameBytesPair value = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> valueBuilder_; /** * optional .hbase.pb.NameBytesPair value = 1; @@ -28111,7 +28736,7 @@ public final class ClientProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getValue() { if (valueBuilder_ == null) { - return value_; + return value_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : value_; } else { return valueBuilder_.getMessage(); } @@ -28152,6 +28777,7 @@ public final class ClientProtos { public Builder mergeValue(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value) { if (valueBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + value_ != null && value_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { value_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(value_).mergeFrom(value).buildPartial(); @@ -28170,7 +28796,7 @@ public final class ClientProtos { */ public Builder clearValue() { if (valueBuilder_ == null) { - value_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + value_ = null; onChanged(); } else { valueBuilder_.clear(); @@ -28193,41 +28819,79 @@ public final class ClientProtos { if (valueBuilder_ != null) { return valueBuilder_.getMessageOrBuilder(); } else { - return value_; + return value_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : value_; } } /** * optional .hbase.pb.NameBytesPair value = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getValueFieldBuilder() { if (valueBuilder_ == null) { - valueBuilder_ = new com.google.protobuf.SingleFieldBuilder< + valueBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( - value_, + getValue(), getParentForChildren(), isClean()); value_ = null; } return valueBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.CoprocessorServiceResult) } + // @@protoc_insertion_point(class_scope:hbase.pb.CoprocessorServiceResult) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult DEFAULT_INSTANCE; static { - defaultInstance = new CoprocessorServiceResult(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult getDefaultInstance() { + 
return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public CoprocessorServiceResult parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CoprocessorServiceResult(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.CoprocessorServiceResult) } - public interface CoprocessorServiceRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface CoprocessorServiceRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.CoprocessorServiceRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionSpecifier region = 1; /** * required .hbase.pb.RegionSpecifier region = 1; */ @@ -28241,7 +28905,6 @@ public final class ClientProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - // required .hbase.pb.CoprocessorServiceCall call = 2; /** * required .hbase.pb.CoprocessorServiceCall call = 2; */ @@ -28258,36 +28921,27 @@ public final class ClientProtos { /** * Protobuf type {@code hbase.pb.CoprocessorServiceRequest} */ - public static final class CoprocessorServiceRequest extends - com.google.protobuf.GeneratedMessage - implements CoprocessorServiceRequestOrBuilder { + public static final class CoprocessorServiceRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // 
@@protoc_insertion_point(message_implements:hbase.pb.CoprocessorServiceRequest) + CoprocessorServiceRequestOrBuilder { // Use CoprocessorServiceRequest.newBuilder() to construct. - private CoprocessorServiceRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private CoprocessorServiceRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private CoprocessorServiceRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final CoprocessorServiceRequest defaultInstance; - public static CoprocessorServiceRequest getDefaultInstance() { - return defaultInstance; - } - - public CoprocessorServiceRequest getDefaultInstanceForType() { - return defaultInstance; + private CoprocessorServiceRequest() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private CoprocessorServiceRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -28338,7 +28992,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -28349,30 +29003,14 @@ public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceRequest_descriptor; } - 
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CoprocessorServiceRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CoprocessorServiceRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** @@ -28385,16 +29023,15 @@ public final class ClientProtos { * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } /** * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } - // required .hbase.pb.CoprocessorServiceCall call = 2; public static final int CALL_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall call_; /** @@ -28407,23 +29044,20 @@ public final class ClientProtos { * required .hbase.pb.CoprocessorServiceCall call = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall getCall() { - return call_; + return call_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance() : call_; } /** * required .hbase.pb.CoprocessorServiceCall call = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getCallOrBuilder() { - return call_; + return call_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance() : call_; } - private void initFields() { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - call_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRegion()) { memoizedIsInitialized = 0; @@ -28447,43 +29081,35 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); + output.writeMessage(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, call_); + 
output.writeMessage(2, getCall()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); + .computeMessageSize(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, call_); + .computeMessageSize(2, getCall()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -28504,12 +29130,10 @@ public final class ClientProtos { result = result && getCall() .equals(other.getCall()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -28525,7 +29149,7 @@ public final class ClientProtos { hash = (37 * hash) + CALL_FIELD_NUMBER; hash = (53 * hash) + getCall().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -28553,46 +29177,57 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return 
PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder 
newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -28600,14 +29235,15 @@ public final class ClientProtos { * Protobuf type {@code hbase.pb.CoprocessorServiceRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.CoprocessorServiceRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -28620,30 
+29256,27 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionFieldBuilder(); getCallFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (callBuilder_ == null) { - call_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance(); + call_ = null; } else { callBuilder_.clear(); } @@ -28651,10 +29284,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceRequest_descriptor; @@ -28697,6 +29326,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + 
com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest)other); @@ -28714,25 +29369,22 @@ public final class ClientProtos { if (other.hasCall()) { mergeCall(other.getCall()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasRegion()) { - return false; } if (!hasCall()) { - return false; } if (!getRegion().isInitialized()) { - return false; } if (!getCall().isInitialized()) { - return false; } return true; @@ -28747,7 +29399,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -28757,9 +29409,8 @@ public final class ClientProtos { } private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier 
region_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * required .hbase.pb.RegionSpecifier region = 1; @@ -28772,7 +29423,7 @@ public final class ClientProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } @@ -28813,6 +29464,7 @@ public final class ClientProtos { public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != null && region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); @@ -28831,7 +29483,7 @@ public final class ClientProtos { */ public Builder clearRegion() { if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; onChanged(); } else { regionBuilder_.clear(); @@ -28854,19 +29506,20 @@ public final class ClientProtos { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { - return region_; + return region_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } } /** * required .hbase.pb.RegionSpecifier region = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, + getRegion(), getParentForChildren(), isClean()); region_ = null; @@ -28874,9 +29527,8 @@ public final class ClientProtos { return regionBuilder_; } - // required .hbase.pb.CoprocessorServiceCall call = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall call_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall call_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder> callBuilder_; /** * required .hbase.pb.CoprocessorServiceCall call = 2; @@ -28889,7 +29541,7 @@ public final 
class ClientProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall getCall() { if (callBuilder_ == null) { - return call_; + return call_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance() : call_; } else { return callBuilder_.getMessage(); } @@ -28930,6 +29582,7 @@ public final class ClientProtos { public Builder mergeCall(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall value) { if (callBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + call_ != null && call_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance()) { call_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.newBuilder(call_).mergeFrom(value).buildPartial(); @@ -28948,7 +29601,7 @@ public final class ClientProtos { */ public Builder clearCall() { if (callBuilder_ == null) { - call_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance(); + call_ = null; onChanged(); } else { callBuilder_.clear(); @@ -28971,41 +29624,79 @@ public final class ClientProtos { if (callBuilder_ != null) { return callBuilder_.getMessageOrBuilder(); } else { - return call_; + return call_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance() : call_; } } /** * required .hbase.pb.CoprocessorServiceCall call = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder> getCallFieldBuilder() { if (callBuilder_ == null) { - callBuilder_ = new com.google.protobuf.SingleFieldBuilder< + callBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder>( - call_, + getCall(), getParentForChildren(), isClean()); call_ = null; } return callBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.CoprocessorServiceRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.CoprocessorServiceRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest DEFAULT_INSTANCE; static { - defaultInstance = new CoprocessorServiceRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest(); + } + + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public CoprocessorServiceRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CoprocessorServiceRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.CoprocessorServiceRequest) } - public interface CoprocessorServiceResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface CoprocessorServiceResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.CoprocessorServiceResponse) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionSpecifier region = 1; /** * required .hbase.pb.RegionSpecifier region = 1; */ @@ -29019,7 +29710,6 @@ public final class ClientProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - // required .hbase.pb.NameBytesPair value = 2; /** * required .hbase.pb.NameBytesPair value = 2; */ @@ -29036,36 +29726,27 @@ public final class ClientProtos { /** * Protobuf type {@code hbase.pb.CoprocessorServiceResponse} */ - public static final class CoprocessorServiceResponse extends - com.google.protobuf.GeneratedMessage - implements CoprocessorServiceResponseOrBuilder { + public static final class CoprocessorServiceResponse 
extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.CoprocessorServiceResponse) + CoprocessorServiceResponseOrBuilder { // Use CoprocessorServiceResponse.newBuilder() to construct. - private CoprocessorServiceResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private CoprocessorServiceResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private CoprocessorServiceResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final CoprocessorServiceResponse defaultInstance; - public static CoprocessorServiceResponse getDefaultInstance() { - return defaultInstance; - } - - public CoprocessorServiceResponse getDefaultInstanceForType() { - return defaultInstance; + private CoprocessorServiceResponse() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private CoprocessorServiceResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -29116,7 +29797,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -29127,30 +29808,14 @@ public final class ClientProtos { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CoprocessorServiceResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CoprocessorServiceResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** @@ -29163,16 +29828,15 @@ public final class ClientProtos { * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } /** * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } - // required .hbase.pb.NameBytesPair value = 2; public static final int VALUE_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value_; /** @@ -29185,23 +29849,20 @@ public final class ClientProtos { * required .hbase.pb.NameBytesPair value = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getValue() { - return value_; + return value_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : value_; } /** * required .hbase.pb.NameBytesPair value = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() { - return value_; + return value_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : value_; } - private void initFields() { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - value_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRegion()) { memoizedIsInitialized = 0; @@ -29225,43 +29886,35 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); + output.writeMessage(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, value_); + output.writeMessage(2, getValue()); } - 
getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); + .computeMessageSize(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, value_); + .computeMessageSize(2, getValue()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -29282,12 +29935,10 @@ public final class ClientProtos { result = result && getValue() .equals(other.getValue()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -29303,7 +29954,7 @@ public final class ClientProtos { hash = (37 * hash) + VALUE_FIELD_NUMBER; hash = (53 * hash) + getValue().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -29331,46 +29982,57 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + 
return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return 
Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -29378,14 +30040,15 @@ public final class ClientProtos { * Protobuf type {@code hbase.pb.CoprocessorServiceResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.CoprocessorServiceResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -29398,30 +30061,27 @@ public 
final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionFieldBuilder(); getValueFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (valueBuilder_ == null) { - value_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + value_ = null; } else { valueBuilder_.clear(); } @@ -29429,10 +30089,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResponse_descriptor; @@ -29475,6 +30131,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + 
com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse)other); @@ -29492,25 +30174,22 @@ public final class ClientProtos { if (other.hasValue()) { mergeValue(other.getValue()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasRegion()) { - return false; } if (!hasValue()) { - return false; } if (!getRegion().isInitialized()) { - return false; } if (!getValue().isInitialized()) { - return false; } return true; @@ -29525,7 +30204,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -29535,9 +30214,8 @@ public final class ClientProtos { } private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * required .hbase.pb.RegionSpecifier region = 1; @@ -29550,7 +30228,7 @@ public final class ClientProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } @@ -29591,6 +30269,7 @@ public final class ClientProtos { public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != null && region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); @@ -29609,7 +30288,7 @@ public final class ClientProtos { */ public Builder clearRegion() { if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; onChanged(); } else { regionBuilder_.clear(); @@ -29632,19 +30311,20 @@ public final class ClientProtos { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { - return region_; + return region_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } } /** * required .hbase.pb.RegionSpecifier region = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, + getRegion(), getParentForChildren(), isClean()); region_ = null; @@ -29652,9 +30332,8 @@ public final class ClientProtos { return regionBuilder_; } - // required .hbase.pb.NameBytesPair value = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> valueBuilder_; /** * required .hbase.pb.NameBytesPair value = 2; @@ -29667,7 +30346,7 @@ public final class ClientProtos { */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getValue() { if (valueBuilder_ == null) { - return value_; + return value_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : value_; } else { return valueBuilder_.getMessage(); } @@ -29708,6 +30387,7 @@ public final class ClientProtos { public Builder mergeValue(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value) { if (valueBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + value_ != null && value_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { value_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(value_).mergeFrom(value).buildPartial(); @@ -29726,7 +30406,7 @@ public final class ClientProtos { */ public Builder clearValue() { if (valueBuilder_ == null) { - value_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + value_ = null; onChanged(); } else { valueBuilder_.clear(); @@ -29749,61 +30429,98 @@ public final class ClientProtos { if (valueBuilder_ != null) { return valueBuilder_.getMessageOrBuilder(); } else { - return value_; + return value_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : value_; } } /** * required .hbase.pb.NameBytesPair value = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getValueFieldBuilder() { if (valueBuilder_ == null) { - valueBuilder_ = new com.google.protobuf.SingleFieldBuilder< + valueBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( - value_, + getValue(), getParentForChildren(), isClean()); value_ = null; } return valueBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.CoprocessorServiceResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.CoprocessorServiceResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse DEFAULT_INSTANCE; static { - defaultInstance = new CoprocessorServiceResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse 
getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public CoprocessorServiceResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CoprocessorServiceResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.CoprocessorServiceResponse) } - public interface ActionOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ActionOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.Action) + com.google.protobuf.MessageOrBuilder { - // optional uint32 index = 1; /** - * optional uint32 index = 1; - * *
      * If part of a multi action, useful aligning
      * result with what was originally submitted.
      * 
+ * + * optional uint32 index = 1; */ boolean hasIndex(); /** - * optional uint32 index = 1; - * *
      * If part of a multi action, useful aligning
      * result with what was originally submitted.
      * 
+ * + * optional uint32 index = 1; */ int getIndex(); - // optional .hbase.pb.MutationProto mutation = 2; /** * optional .hbase.pb.MutationProto mutation = 2; */ @@ -29817,7 +30534,6 @@ public final class ClientProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder(); - // optional .hbase.pb.Get get = 3; /** * optional .hbase.pb.Get get = 3; */ @@ -29831,7 +30547,6 @@ public final class ClientProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder(); - // optional .hbase.pb.CoprocessorServiceCall service_call = 4; /** * optional .hbase.pb.CoprocessorServiceCall service_call = 4; */ @@ -29846,42 +30561,34 @@ public final class ClientProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getServiceCallOrBuilder(); } /** - * Protobuf type {@code hbase.pb.Action} - * *
    * Either a Get or a Mutation
    * 
+ * + * Protobuf type {@code hbase.pb.Action} */ - public static final class Action extends - com.google.protobuf.GeneratedMessage - implements ActionOrBuilder { + public static final class Action extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.Action) + ActionOrBuilder { // Use Action.newBuilder() to construct. - private Action(com.google.protobuf.GeneratedMessage.Builder builder) { + private Action(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private Action(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final Action defaultInstance; - public static Action getDefaultInstance() { - return defaultInstance; - } - - public Action getDefaultInstanceForType() { - return defaultInstance; + private Action() { + index_ = 0; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private Action( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -29950,7 +30657,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -29961,56 +30668,39 @@ public final class ClientProtos { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Action_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Action_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public Action parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new Action(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional uint32 index = 1; public static final int INDEX_FIELD_NUMBER = 1; private int index_; /** - * optional uint32 index = 1; - * *
      * If part of a multi action, useful aligning
      * result with what was originally submitted.
      * 
+ * + * optional uint32 index = 1; */ public boolean hasIndex() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional uint32 index = 1; - * *
      * If part of a multi action, useful aligning
      * result with what was originally submitted.
      * 
+ * + * optional uint32 index = 1; */ public int getIndex() { return index_; } - // optional .hbase.pb.MutationProto mutation = 2; public static final int MUTATION_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto mutation_; /** @@ -30023,16 +30713,15 @@ public final class ClientProtos { * optional .hbase.pb.MutationProto mutation = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto getMutation() { - return mutation_; + return mutation_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance() : mutation_; } /** * optional .hbase.pb.MutationProto mutation = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() { - return mutation_; + return mutation_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance() : mutation_; } - // optional .hbase.pb.Get get = 3; public static final int GET_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get get_; /** @@ -30045,16 +30734,15 @@ public final class ClientProtos { * optional .hbase.pb.Get get = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get getGet() { - return get_; + return get_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance() : get_; } /** * optional .hbase.pb.Get get = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() { - return get_; + return get_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance() : get_; } - // optional .hbase.pb.CoprocessorServiceCall service_call = 4; public static final int SERVICE_CALL_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall serviceCall_; /** @@ -30067,25 +30755,20 @@ public final class ClientProtos { * optional .hbase.pb.CoprocessorServiceCall service_call = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall getServiceCall() { - return serviceCall_; + return serviceCall_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance() : serviceCall_; } /** * optional .hbase.pb.CoprocessorServiceCall service_call = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getServiceCallOrBuilder() { - return serviceCall_; + return serviceCall_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance() : serviceCall_; } - private void initFields() { - index_ = 0; - mutation_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance(); - get_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance(); - serviceCall_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (hasMutation()) { if (!getMutation().isInitialized()) { @@ -30111,25 +30794,23 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - 
getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt32(1, index_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, mutation_); + output.writeMessage(2, getMutation()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeMessage(3, get_); + output.writeMessage(3, getGet()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeMessage(4, serviceCall_); + output.writeMessage(4, getServiceCall()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -30139,29 +30820,23 @@ public final class ClientProtos { } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, mutation_); + .computeMessageSize(2, getMutation()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, get_); + .computeMessageSize(3, getGet()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(4, serviceCall_); + .computeMessageSize(4, getServiceCall()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -30192,12 +30867,10 @@ public final class ClientProtos { result = result && getServiceCall() .equals(other.getServiceCall()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result 
= result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -30221,7 +30894,7 @@ public final class ClientProtos { hash = (37 * hash) + SERVICE_CALL_FIELD_NUMBER; hash = (53 * hash) + getServiceCall().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -30249,65 +30922,77 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action parseFrom( 
com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.Action} - * *
      * Either a Get or a Mutation
      * 
+ * + * Protobuf type {@code hbase.pb.Action} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ActionOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.Action) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ActionOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Action_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Action_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -30320,39 +31005,36 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getMutationFieldBuilder(); getGetFieldBuilder(); getServiceCallFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); index_ = 0; bitField0_ = (bitField0_ & ~0x00000001); if (mutationBuilder_ == null) { - mutation_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance(); + mutation_ = null; } else { mutationBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); if (getBuilder_ == null) { - get_ = 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance(); + get_ = null; } else { getBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); if (serviceCallBuilder_ == null) { - serviceCall_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance(); + serviceCall_ = null; } else { serviceCallBuilder_.clear(); } @@ -30360,10 +31042,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Action_descriptor; @@ -30418,6 +31096,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action)other); @@ -30441,26 +31145,24 @@ public final class ClientProtos { if 
(other.hasServiceCall()) { mergeServiceCall(other.getServiceCall()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (hasMutation()) { if (!getMutation().isInitialized()) { - return false; } } if (hasGet()) { if (!getGet().isInitialized()) { - return false; } } if (hasServiceCall()) { if (!getServiceCall().isInitialized()) { - return false; } } @@ -30476,7 +31178,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -30486,37 +31188,36 @@ public final class ClientProtos { } private int bitField0_; - // optional uint32 index = 1; private int index_ ; /** - * optional uint32 index = 1; - * *
        * If part of a multi action, useful aligning
        * result with what was originally submitted.
        * 
+ * + * optional uint32 index = 1; */ public boolean hasIndex() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional uint32 index = 1; - * *
        * If part of a multi action, useful aligning
        * result with what was originally submitted.
        * 
+ * + * optional uint32 index = 1; */ public int getIndex() { return index_; } /** - * optional uint32 index = 1; - * *
        * If part of a multi action, useful aligning
        * result with what was originally submitted.
        * 
+ * + * optional uint32 index = 1; */ public Builder setIndex(int value) { bitField0_ |= 0x00000001; @@ -30525,12 +31226,12 @@ public final class ClientProtos { return this; } /** - * optional uint32 index = 1; - * *
        * If part of a multi action, useful aligning
        * result with what was originally submitted.
        * 
+ * + * optional uint32 index = 1; */ public Builder clearIndex() { bitField0_ = (bitField0_ & ~0x00000001); @@ -30539,9 +31240,8 @@ public final class ClientProtos { return this; } - // optional .hbase.pb.MutationProto mutation = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto mutation_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto mutation_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProtoOrBuilder> mutationBuilder_; /** * optional .hbase.pb.MutationProto mutation = 2; @@ -30554,7 +31254,7 @@ public final class ClientProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto getMutation() { if (mutationBuilder_ == null) { - return mutation_; + return mutation_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance() : mutation_; } else { return mutationBuilder_.getMessage(); } @@ -30595,6 +31295,7 @@ public final class ClientProtos { public Builder mergeMutation(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto value) { if (mutationBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + mutation_ != null && mutation_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance()) { mutation_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.newBuilder(mutation_).mergeFrom(value).buildPartial(); @@ -30613,7 +31314,7 @@ public final class ClientProtos { */ public Builder clearMutation() { if (mutationBuilder_ == null) { - mutation_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance(); + mutation_ = null; onChanged(); } else { mutationBuilder_.clear(); @@ -30636,19 +31337,20 @@ public final class ClientProtos { if (mutationBuilder_ != null) { return mutationBuilder_.getMessageOrBuilder(); } else { - return mutation_; + return mutation_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance() : mutation_; } } /** * optional .hbase.pb.MutationProto mutation = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProtoOrBuilder> getMutationFieldBuilder() { if (mutationBuilder_ == null) { - mutationBuilder_ = new com.google.protobuf.SingleFieldBuilder< + mutationBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProtoOrBuilder>( - mutation_, + getMutation(), getParentForChildren(), isClean()); mutation_ = null; @@ -30656,9 +31358,8 @@ public final class ClientProtos { return mutationBuilder_; } - // optional .hbase.pb.Get get = 3; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get get_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get get_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetOrBuilder> getBuilder_; /** * optional .hbase.pb.Get get = 3; @@ -30671,7 +31372,7 @@ public final class ClientProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get getGet() { if (getBuilder_ == null) { - return 
get_; + return get_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance() : get_; } else { return getBuilder_.getMessage(); } @@ -30712,6 +31413,7 @@ public final class ClientProtos { public Builder mergeGet(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get value) { if (getBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004) && + get_ != null && get_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance()) { get_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.newBuilder(get_).mergeFrom(value).buildPartial(); @@ -30730,7 +31432,7 @@ public final class ClientProtos { */ public Builder clearGet() { if (getBuilder_ == null) { - get_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance(); + get_ = null; onChanged(); } else { getBuilder_.clear(); @@ -30753,19 +31455,20 @@ public final class ClientProtos { if (getBuilder_ != null) { return getBuilder_.getMessageOrBuilder(); } else { - return get_; + return get_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance() : get_; } } /** * optional .hbase.pb.Get get = 3; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetOrBuilder> getGetFieldBuilder() { if (getBuilder_ == null) { - getBuilder_ = new com.google.protobuf.SingleFieldBuilder< + getBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetOrBuilder>( - get_, + getGet(), getParentForChildren(), isClean()); get_ = null; @@ -30773,9 +31476,8 @@ public final class ClientProtos { return getBuilder_; } - // optional .hbase.pb.CoprocessorServiceCall service_call = 4; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall serviceCall_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall serviceCall_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder> serviceCallBuilder_; /** * optional .hbase.pb.CoprocessorServiceCall service_call = 4; @@ -30788,7 +31490,7 @@ public final class ClientProtos { */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall getServiceCall() { if (serviceCallBuilder_ == null) { - return serviceCall_; + return serviceCall_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance() : serviceCall_; } else { return serviceCallBuilder_.getMessage(); } @@ -30829,6 +31531,7 @@ public final class ClientProtos { public Builder mergeServiceCall(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall value) { if (serviceCallBuilder_ == null) { if (((bitField0_ & 0x00000008) == 0x00000008) && + serviceCall_ != null && serviceCall_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance()) { serviceCall_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.newBuilder(serviceCall_).mergeFrom(value).buildPartial(); @@ -30847,7 +31550,7 @@ public final class ClientProtos { */ public Builder clearServiceCall() { if (serviceCallBuilder_ == null) { - serviceCall_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance(); + serviceCall_ = null; onChanged(); } else { serviceCallBuilder_.clear(); @@ -30870,41 +31573,79 @@ public final class ClientProtos { if (serviceCallBuilder_ != null) { return serviceCallBuilder_.getMessageOrBuilder(); } else { - return serviceCall_; + return serviceCall_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance() : serviceCall_; } } /** * optional .hbase.pb.CoprocessorServiceCall service_call = 4; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder> getServiceCallFieldBuilder() { if (serviceCallBuilder_ == null) { - serviceCallBuilder_ = new com.google.protobuf.SingleFieldBuilder< + serviceCallBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder>( - serviceCall_, + getServiceCall(), getParentForChildren(), isClean()); serviceCall_ = null; } return serviceCallBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.Action) } + // @@protoc_insertion_point(class_scope:hbase.pb.Action) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action DEFAULT_INSTANCE; static { - defaultInstance = new Action(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action(); + } + + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public Action parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Action(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.Action) } - public interface RegionActionOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RegionActionOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RegionAction) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionSpecifier region = 1; /** * required .hbase.pb.RegionSpecifier region = 1; */ @@ -30918,25 +31659,23 @@ public final class ClientProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - // optional bool atomic = 2; /** - * optional bool atomic = 2; - * *
      * When set, run mutations as atomic unit.
      * 
+ * + * optional bool atomic = 2; */ boolean hasAtomic(); /** - * optional bool atomic = 2; - * *
      * When set, run mutations as atomic unit.
      * 
+ * + * optional bool atomic = 2; */ boolean getAtomic(); - // repeated .hbase.pb.Action action = 3; /** * repeated .hbase.pb.Action action = 3; */ @@ -30962,43 +31701,36 @@ public final class ClientProtos { int index); } /** - * Protobuf type {@code hbase.pb.RegionAction} - * *
    **
    * Actions to run against a Region.
    * 
+ * + * Protobuf type {@code hbase.pb.RegionAction} */ - public static final class RegionAction extends - com.google.protobuf.GeneratedMessage - implements RegionActionOrBuilder { + public static final class RegionAction extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RegionAction) + RegionActionOrBuilder { // Use RegionAction.newBuilder() to construct. - private RegionAction(com.google.protobuf.GeneratedMessage.Builder builder) { + private RegionAction(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private RegionAction(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RegionAction defaultInstance; - public static RegionAction getDefaultInstance() { - return defaultInstance; - } - - public RegionAction getDefaultInstanceForType() { - return defaultInstance; + private RegionAction() { + atomic_ = false; + action_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RegionAction( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -31040,7 +31772,8 @@ public final class ClientProtos { action_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000004; } - action_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action.PARSER, extensionRegistry)); + action_.add( + 
input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action.PARSER, extensionRegistry)); break; } } @@ -31049,7 +31782,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { action_ = java.util.Collections.unmodifiableList(action_); @@ -31063,30 +31796,14 @@ public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionAction_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionAction_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RegionAction parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RegionAction(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** @@ -31099,40 +31816,38 @@ public final class ClientProtos { * required 
.hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } /** * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } - // optional bool atomic = 2; public static final int ATOMIC_FIELD_NUMBER = 2; private boolean atomic_; /** - * optional bool atomic = 2; - * *
      * When set, run mutations as atomic unit.
      * 
+ * + * optional bool atomic = 2; */ public boolean hasAtomic() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional bool atomic = 2; - * *
      * When set, run mutations as atomic unit.
      * 
+ * + * optional bool atomic = 2; */ public boolean getAtomic() { return atomic_; } - // repeated .hbase.pb.Action action = 3; public static final int ACTION_FIELD_NUMBER = 3; private java.util.List action_; /** @@ -31168,15 +31883,11 @@ public final class ClientProtos { return action_.get(index); } - private void initFields() { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - atomic_ = false; - action_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRegion()) { memoizedIsInitialized = 0; @@ -31198,9 +31909,8 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); + output.writeMessage(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBool(2, atomic_); @@ -31208,18 +31918,17 @@ public final class ClientProtos { for (int i = 0; i < action_.size(); i++) { output.writeMessage(3, action_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); + .computeMessageSize(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream @@ -31229,19 +31938,13 @@ public final class ClientProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(3, 
action_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -31264,12 +31967,10 @@ public final class ClientProtos { } result = result && getActionList() .equals(other.getActionList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -31283,13 +31984,14 @@ public final class ClientProtos { } if (hasAtomic()) { hash = (37 * hash) + ATOMIC_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getAtomic()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getAtomic()); } if (getActionCount() > 0) { hash = (37 * hash) + ACTION_FIELD_NUMBER; hash = (53 * hash) + getActionList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -31317,66 +32019,78 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, 
extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder 
toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.RegionAction} - * *
      **
      * Actions to run against a Region.
      * 
+ * + * Protobuf type {@code hbase.pb.RegionAction} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RegionAction) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionAction_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionAction_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -31389,24 +32103,21 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionFieldBuilder(); getActionFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; } else { regionBuilder_.clear(); } @@ -31422,10 +32133,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionAction_descriptor; @@ -31473,6 +32180,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction)other); @@ -31509,29 +32242,27 @@ public final class ClientProtos { action_ = other.action_; bitField0_ = (bitField0_ & ~0x00000004); actionBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getActionFieldBuilder() : null; } else { actionBuilder_.addAllMessages(other.action_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasRegion()) { - return false; } if (!getRegion().isInitialized()) { - return false; } for (int i = 0; i < getActionCount(); i++) { if (!getAction(i).isInitialized()) { - return false; } } @@ -31547,7 +32278,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -31557,9 +32288,8 @@ public final class ClientProtos { } private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * required .hbase.pb.RegionSpecifier region = 1; @@ -31572,7 +32302,7 @@ public final class ClientProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } @@ -31613,6 +32343,7 @@ public final class ClientProtos { public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != null && region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); @@ -31631,7 +32362,7 @@ public final class ClientProtos { */ public Builder clearRegion() { if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; onChanged(); } else { regionBuilder_.clear(); @@ -31654,19 +32385,20 @@ public final class ClientProtos { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { - return region_; + return region_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } } /** * required .hbase.pb.RegionSpecifier region = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, + getRegion(), getParentForChildren(), isClean()); region_ = null; @@ -31674,34 +32406,33 @@ public final class ClientProtos { return regionBuilder_; } - // optional bool atomic = 2; private boolean atomic_ ; /** - * optional bool atomic = 2; - * *
        * When set, run mutations as atomic unit.
        * 
+ * + * optional bool atomic = 2; */ public boolean hasAtomic() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional bool atomic = 2; - * *
        * When set, run mutations as atomic unit.
        * 
+ * + * optional bool atomic = 2; */ public boolean getAtomic() { return atomic_; } /** - * optional bool atomic = 2; - * *
        * When set, run mutations as atomic unit.
        * 
+ * + * optional bool atomic = 2; */ public Builder setAtomic(boolean value) { bitField0_ |= 0x00000002; @@ -31710,11 +32441,11 @@ public final class ClientProtos { return this; } /** - * optional bool atomic = 2; - * *
        * When set, run mutations as atomic unit.
        * 
+ * + * optional bool atomic = 2; */ public Builder clearAtomic() { bitField0_ = (bitField0_ & ~0x00000002); @@ -31723,7 +32454,6 @@ public final class ClientProtos { return this; } - // repeated .hbase.pb.Action action = 3; private java.util.List action_ = java.util.Collections.emptyList(); private void ensureActionIsMutable() { @@ -31733,7 +32463,7 @@ public final class ClientProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ActionOrBuilder> actionBuilder_; /** @@ -31865,7 +32595,8 @@ public final class ClientProtos { java.lang.Iterable values) { if (actionBuilder_ == null) { ensureActionIsMutable(); - super.addAll(values, action_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, action_); onChanged(); } else { actionBuilder_.addAllMessages(values); @@ -31948,11 +32679,11 @@ public final class ClientProtos { getActionBuilderList() { return getActionFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ActionOrBuilder> getActionFieldBuilder() { if (actionBuilder_ == null) { - actionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + actionBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ActionOrBuilder>( action_, ((bitField0_ & 0x00000004) == 
0x00000004), @@ -31962,115 +32693,143 @@ public final class ClientProtos { } return actionBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RegionAction) } + // @@protoc_insertion_point(class_scope:hbase.pb.RegionAction) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction DEFAULT_INSTANCE; static { - defaultInstance = new RegionAction(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RegionAction parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionAction(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.RegionAction) } - public interface RegionLoadStatsOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RegionLoadStatsOrBuilder extends + // 
@@protoc_insertion_point(interface_extends:hbase.pb.RegionLoadStats) + com.google.protobuf.MessageOrBuilder { - // optional int32 memstoreLoad = 1 [default = 0]; /** - * optional int32 memstoreLoad = 1 [default = 0]; - * *
      * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
      * 
+ * + * optional int32 memstoreLoad = 1 [default = 0]; */ boolean hasMemstoreLoad(); /** - * optional int32 memstoreLoad = 1 [default = 0]; - * *
      * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
      * 
+ * + * optional int32 memstoreLoad = 1 [default = 0]; */ int getMemstoreLoad(); - // optional int32 heapOccupancy = 2 [default = 0]; /** - * optional int32 heapOccupancy = 2 [default = 0]; - * *
      * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
      * We can move this to "ServerLoadStats" should we develop them.
      * 
+ * + * optional int32 heapOccupancy = 2 [default = 0]; */ boolean hasHeapOccupancy(); /** - * optional int32 heapOccupancy = 2 [default = 0]; - * *
      * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
      * We can move this to "ServerLoadStats" should we develop them.
      * 
+ * + * optional int32 heapOccupancy = 2 [default = 0]; */ int getHeapOccupancy(); - // optional int32 compactionPressure = 3 [default = 0]; /** - * optional int32 compactionPressure = 3 [default = 0]; - * *
      * Compaction pressure. Guaranteed to be positive, between 0 and 100.
      * 
+ * + * optional int32 compactionPressure = 3 [default = 0]; */ boolean hasCompactionPressure(); /** - * optional int32 compactionPressure = 3 [default = 0]; - * *
      * Compaction pressure. Guaranteed to be positive, between 0 and 100.
      * 
+ * + * optional int32 compactionPressure = 3 [default = 0]; */ int getCompactionPressure(); } /** - * Protobuf type {@code hbase.pb.RegionLoadStats} - * *
-   *
    * Statistics about the current load on the region
    * 
+ * + * Protobuf type {@code hbase.pb.RegionLoadStats} */ - public static final class RegionLoadStats extends - com.google.protobuf.GeneratedMessage - implements RegionLoadStatsOrBuilder { + public static final class RegionLoadStats extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RegionLoadStats) + RegionLoadStatsOrBuilder { // Use RegionLoadStats.newBuilder() to construct. - private RegionLoadStats(com.google.protobuf.GeneratedMessage.Builder builder) { + private RegionLoadStats(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private RegionLoadStats(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RegionLoadStats defaultInstance; - public static RegionLoadStats getDefaultInstance() { - return defaultInstance; } - - public RegionLoadStats getDefaultInstanceForType() { - return defaultInstance; + private RegionLoadStats() { + memstoreLoad_ = 0; + heapOccupancy_ = 0; + compactionPressure_ = 0; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RegionLoadStats( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -32110,7 +32869,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields 
= unknownFields.build(); makeExtensionsImmutable(); @@ -32121,112 +32880,90 @@ public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionLoadStats_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionLoadStats_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RegionLoadStats parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RegionLoadStats(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional int32 memstoreLoad = 1 [default = 0]; public static final int MEMSTORELOAD_FIELD_NUMBER = 1; private int memstoreLoad_; /** - * optional int32 memstoreLoad = 1 [default = 0]; - * *
      * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
      * 
+ * + * optional int32 memstoreLoad = 1 [default = 0]; */ public boolean hasMemstoreLoad() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional int32 memstoreLoad = 1 [default = 0]; - * *
      * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
      * 
+ * + * optional int32 memstoreLoad = 1 [default = 0]; */ public int getMemstoreLoad() { return memstoreLoad_; } - // optional int32 heapOccupancy = 2 [default = 0]; public static final int HEAPOCCUPANCY_FIELD_NUMBER = 2; private int heapOccupancy_; /** - * optional int32 heapOccupancy = 2 [default = 0]; - * *
      * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
      * We can move this to "ServerLoadStats" should we develop them.
      * 
+ * + * optional int32 heapOccupancy = 2 [default = 0]; */ public boolean hasHeapOccupancy() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional int32 heapOccupancy = 2 [default = 0]; - * *
      * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
      * We can move this to "ServerLoadStats" should we develop them.
      * 
+ * + * optional int32 heapOccupancy = 2 [default = 0]; */ public int getHeapOccupancy() { return heapOccupancy_; } - // optional int32 compactionPressure = 3 [default = 0]; public static final int COMPACTIONPRESSURE_FIELD_NUMBER = 3; private int compactionPressure_; /** - * optional int32 compactionPressure = 3 [default = 0]; - * *
      * Compaction pressure. Guaranteed to be positive, between 0 and 100.
      * 
+ * + * optional int32 compactionPressure = 3 [default = 0]; */ public boolean hasCompactionPressure() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * optional int32 compactionPressure = 3 [default = 0]; - * *
      * Compaction pressure. Guaranteed to be positive, between 0 and 100.
      * 
+ * + * optional int32 compactionPressure = 3 [default = 0]; */ public int getCompactionPressure() { return compactionPressure_; } - private void initFields() { - memstoreLoad_ = 0; - heapOccupancy_ = 0; - compactionPressure_ = 0; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -32234,7 +32971,6 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeInt32(1, memstoreLoad_); } @@ -32244,12 +32980,11 @@ public final class ClientProtos { if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeInt32(3, compactionPressure_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -32265,19 +33000,13 @@ public final class ClientProtos { size += com.google.protobuf.CodedOutputStream .computeInt32Size(3, compactionPressure_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -32303,12 +33032,10 @@ public final class ClientProtos { result = result && (getCompactionPressure() == other.getCompactionPressure()); } - result = result && - 
getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -32328,7 +33055,7 @@ public final class ClientProtos { hash = (37 * hash) + COMPACTIONPRESSURE_FIELD_NUMBER; hash = (53 * hash) + getCompactionPressure(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -32356,66 +33083,77 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } 
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.RegionLoadStats} - * *
-     *
      * Statistics about the current load on the region
      * 
+ * + * Protobuf type {@code hbase.pb.RegionLoadStats} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RegionLoadStats) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionLoadStats_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionLoadStats_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -32428,18 +33166,15 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); memstoreLoad_ = 0; @@ -32451,10 +33186,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionLoadStats_descriptor; @@ -32493,6 +33224,32 @@ public final class 
ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats)other); @@ -32513,7 +33270,8 @@ public final class ClientProtos { if (other.hasCompactionPressure()) { setCompactionPressure(other.getCompactionPressure()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -32530,7 +33288,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -32540,34 +33298,33 @@ public final class ClientProtos { } private int bitField0_; - // optional int32 memstoreLoad = 1 [default = 0]; 
private int memstoreLoad_ ; /** - * optional int32 memstoreLoad = 1 [default = 0]; - * *
        * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
        * 
+ * + * optional int32 memstoreLoad = 1 [default = 0]; */ public boolean hasMemstoreLoad() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional int32 memstoreLoad = 1 [default = 0]; - * *
        * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
        * 
+ * + * optional int32 memstoreLoad = 1 [default = 0]; */ public int getMemstoreLoad() { return memstoreLoad_; } /** - * optional int32 memstoreLoad = 1 [default = 0]; - * *
        * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
        * 
+ * + * optional int32 memstoreLoad = 1 [default = 0]; */ public Builder setMemstoreLoad(int value) { bitField0_ |= 0x00000001; @@ -32576,11 +33333,11 @@ public final class ClientProtos { return this; } /** - * optional int32 memstoreLoad = 1 [default = 0]; - * *
        * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
        * 
+ * + * optional int32 memstoreLoad = 1 [default = 0]; */ public Builder clearMemstoreLoad() { bitField0_ = (bitField0_ & ~0x00000001); @@ -32589,37 +33346,36 @@ public final class ClientProtos { return this; } - // optional int32 heapOccupancy = 2 [default = 0]; private int heapOccupancy_ ; /** - * optional int32 heapOccupancy = 2 [default = 0]; - * *
        * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
        * We can move this to "ServerLoadStats" should we develop them.
        * 
+ * + * optional int32 heapOccupancy = 2 [default = 0]; */ public boolean hasHeapOccupancy() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional int32 heapOccupancy = 2 [default = 0]; - * *
        * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
        * We can move this to "ServerLoadStats" should we develop them.
        * 
+ * + * optional int32 heapOccupancy = 2 [default = 0]; */ public int getHeapOccupancy() { return heapOccupancy_; } /** - * optional int32 heapOccupancy = 2 [default = 0]; - * *
        * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
        * We can move this to "ServerLoadStats" should we develop them.
        * 
+ * + * optional int32 heapOccupancy = 2 [default = 0]; */ public Builder setHeapOccupancy(int value) { bitField0_ |= 0x00000002; @@ -32628,12 +33384,12 @@ public final class ClientProtos { return this; } /** - * optional int32 heapOccupancy = 2 [default = 0]; - * *
        * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
        * We can move this to "ServerLoadStats" should we develop them.
        * 
+ * + * optional int32 heapOccupancy = 2 [default = 0]; */ public Builder clearHeapOccupancy() { bitField0_ = (bitField0_ & ~0x00000002); @@ -32642,34 +33398,33 @@ public final class ClientProtos { return this; } - // optional int32 compactionPressure = 3 [default = 0]; private int compactionPressure_ ; /** - * optional int32 compactionPressure = 3 [default = 0]; - * *
        * Compaction pressure. Guaranteed to be positive, between 0 and 100.
        * 
+ * + * optional int32 compactionPressure = 3 [default = 0]; */ public boolean hasCompactionPressure() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * optional int32 compactionPressure = 3 [default = 0]; - * *
        * Compaction pressure. Guaranteed to be positive, between 0 and 100.
        * 
+ * + * optional int32 compactionPressure = 3 [default = 0]; */ public int getCompactionPressure() { return compactionPressure_; } /** - * optional int32 compactionPressure = 3 [default = 0]; - * *
        * Compaction pressure. Guaranteed to be positive, between 0 and 100.
        * 
+ * + * optional int32 compactionPressure = 3 [default = 0]; */ public Builder setCompactionPressure(int value) { bitField0_ |= 0x00000004; @@ -32678,11 +33433,11 @@ public final class ClientProtos { return this; } /** - * optional int32 compactionPressure = 3 [default = 0]; - * *
        * Compaction pressure. Guaranteed to be positive, between 0 and 100.
        * 
+ * + * optional int32 compactionPressure = 3 [default = 0]; */ public Builder clearCompactionPressure() { bitField0_ = (bitField0_ & ~0x00000004); @@ -32690,22 +33445,59 @@ public final class ClientProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RegionLoadStats) } + // @@protoc_insertion_point(class_scope:hbase.pb.RegionLoadStats) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats DEFAULT_INSTANCE; static { - defaultInstance = new RegionLoadStats(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RegionLoadStats parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionLoadStats(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.RegionLoadStats) } - public interface 
MultiRegionLoadStatsOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface MultiRegionLoadStatsOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.MultiRegionLoadStats) + com.google.protobuf.MessageOrBuilder { - // repeated .hbase.pb.RegionSpecifier region = 1; /** * repeated .hbase.pb.RegionSpecifier region = 1; */ @@ -32730,7 +33522,6 @@ public final class ClientProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder( int index); - // repeated .hbase.pb.RegionLoadStats stat = 2; /** * repeated .hbase.pb.RegionLoadStats stat = 2; */ @@ -32758,36 +33549,29 @@ public final class ClientProtos { /** * Protobuf type {@code hbase.pb.MultiRegionLoadStats} */ - public static final class MultiRegionLoadStats extends - com.google.protobuf.GeneratedMessage - implements MultiRegionLoadStatsOrBuilder { + public static final class MultiRegionLoadStats extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.MultiRegionLoadStats) + MultiRegionLoadStatsOrBuilder { // Use MultiRegionLoadStats.newBuilder() to construct. 
- private MultiRegionLoadStats(com.google.protobuf.GeneratedMessage.Builder builder) { + private MultiRegionLoadStats(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private MultiRegionLoadStats(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final MultiRegionLoadStats defaultInstance; - public static MultiRegionLoadStats getDefaultInstance() { - return defaultInstance; } - - public MultiRegionLoadStats getDefaultInstanceForType() { - return defaultInstance; + private MultiRegionLoadStats() { + region_ = java.util.Collections.emptyList(); + stat_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private MultiRegionLoadStats( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -32811,7 +33595,8 @@ public final class ClientProtos { region_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } - region_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry)); + region_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry)); break; } case 18: { @@ -32819,7 +33604,8 @@ public final class ClientProtos { stat_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } - stat_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.PARSER, 
extensionRegistry)); + stat_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.PARSER, extensionRegistry)); break; } } @@ -32828,7 +33614,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { region_ = java.util.Collections.unmodifiableList(region_); @@ -32845,29 +33631,13 @@ public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRegionLoadStats_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRegionLoadStats_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public MultiRegionLoadStats parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new MultiRegionLoadStats(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - // repeated .hbase.pb.RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private java.util.List region_; /** @@ -32903,7 +33673,6 @@ public final class ClientProtos { return 
region_.get(index); } - // repeated .hbase.pb.RegionLoadStats stat = 2; public static final int STAT_FIELD_NUMBER = 2; private java.util.List stat_; /** @@ -32939,14 +33708,11 @@ public final class ClientProtos { return stat_.get(index); } - private void initFields() { - region_ = java.util.Collections.emptyList(); - stat_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; for (int i = 0; i < getRegionCount(); i++) { if (!getRegion(i).isInitialized()) { @@ -32960,19 +33726,17 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < region_.size(); i++) { output.writeMessage(1, region_.get(i)); } for (int i = 0; i < stat_.size(); i++) { output.writeMessage(2, stat_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -32984,19 +33748,13 @@ public final class ClientProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, stat_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -33011,12 +33769,10 @@ public final class ClientProtos { 
.equals(other.getRegionList()); result = result && getStatList() .equals(other.getStatList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -33032,7 +33788,7 @@ public final class ClientProtos { hash = (37 * hash) + STAT_FIELD_NUMBER; hash = (53 * hash) + getStatList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -33060,46 +33816,57 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, 
extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -33107,14 +33874,15 @@ public final class ClientProtos { * Protobuf type {@code hbase.pb.MultiRegionLoadStats} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStatsOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.MultiRegionLoadStats) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStatsOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRegionLoadStats_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRegionLoadStats_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -33127,20 +33895,17 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionFieldBuilder(); getStatFieldBuilder(); } } - private static Builder create() { - return 
new Builder(); - } - public Builder clear() { super.clear(); if (regionBuilder_ == null) { @@ -33158,10 +33923,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRegionLoadStats_descriptor; @@ -33204,6 +33965,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats)other); @@ -33234,7 +34021,7 @@ public final class ClientProtos { region_ = other.region_; bitField0_ = (bitField0_ & ~0x00000001); regionBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getRegionFieldBuilder() : null; } else { regionBuilder_.addAllMessages(other.region_); @@ -33260,21 +34047,21 @@ public final class ClientProtos { stat_ = other.stat_; bitField0_ = (bitField0_ & ~0x00000002); statBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getStatFieldBuilder() : null; } else { statBuilder_.addAllMessages(other.stat_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getRegionCount(); i++) { if (!getRegion(i).isInitialized()) { - return false; } } @@ -33290,7 +34077,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -33300,7 +34087,6 @@ public final class ClientProtos { } private int bitField0_; - // repeated .hbase.pb.RegionSpecifier region = 1; private java.util.List region_ = java.util.Collections.emptyList(); private void ensureRegionIsMutable() { @@ -33310,7 +34096,7 @@ public final class ClientProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** @@ -33442,7 +34228,8 @@ public final class ClientProtos { java.lang.Iterable values) { if (regionBuilder_ == null) { ensureRegionIsMutable(); - super.addAll(values, region_); 
+ com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, region_); onChanged(); } else { regionBuilder_.addAllMessages(values); @@ -33525,11 +34312,11 @@ public final class ClientProtos { getRegionBuilderList() { return getRegionFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + regionBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( region_, ((bitField0_ & 0x00000001) == 0x00000001), @@ -33540,7 +34327,6 @@ public final class ClientProtos { return regionBuilder_; } - // repeated .hbase.pb.RegionLoadStats stat = 2; private java.util.List stat_ = java.util.Collections.emptyList(); private void ensureStatIsMutable() { @@ -33550,7 +34336,7 @@ public final class ClientProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder> statBuilder_; /** @@ -33682,7 +34468,8 @@ public final class ClientProtos { java.lang.Iterable values) { if (statBuilder_ == null) { ensureStatIsMutable(); - super.addAll(values, stat_); + 
com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, stat_); onChanged(); } else { statBuilder_.addAllMessages(values); @@ -33765,11 +34552,11 @@ public final class ClientProtos { getStatBuilderList() { return getStatFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder> getStatFieldBuilder() { if (statBuilder_ == null) { - statBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + statBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder>( stat_, ((bitField0_ & 0x00000002) == 0x00000002), @@ -33779,42 +34566,78 @@ public final class ClientProtos { } return statBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.MultiRegionLoadStats) } + // @@protoc_insertion_point(class_scope:hbase.pb.MultiRegionLoadStats) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats DEFAULT_INSTANCE; static { - defaultInstance = new MultiRegionLoadStats(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats(); + } 
+ + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public MultiRegionLoadStats parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MultiRegionLoadStats(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.MultiRegionLoadStats) } - public interface ResultOrExceptionOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ResultOrExceptionOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ResultOrException) + com.google.protobuf.MessageOrBuilder { - // optional uint32 index = 1; /** - * optional uint32 index = 1; - * *
      * If part of a multi call, save original index of the list of all
      * passed so can align this response w/ original request.
      * 
+ * + * optional uint32 index = 1; */ boolean hasIndex(); /** - * optional uint32 index = 1; - * *
      * If part of a multi call, save original index of the list of all
      * passed so can align this response w/ original request.
      * 
+ * + * optional uint32 index = 1; */ int getIndex(); - // optional .hbase.pb.Result result = 2; /** * optional .hbase.pb.Result result = 2; */ @@ -33828,7 +34651,6 @@ public final class ClientProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder(); - // optional .hbase.pb.NameBytesPair exception = 3; /** * optional .hbase.pb.NameBytesPair exception = 3; */ @@ -33842,98 +34664,88 @@ public final class ClientProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder(); - // optional .hbase.pb.CoprocessorServiceResult service_result = 4; /** - * optional .hbase.pb.CoprocessorServiceResult service_result = 4; - * *
      * result if this was a coprocessor service call
      * 
+ * + * optional .hbase.pb.CoprocessorServiceResult service_result = 4; */ boolean hasServiceResult(); /** - * optional .hbase.pb.CoprocessorServiceResult service_result = 4; - * *
      * result if this was a coprocessor service call
      * 
+ * + * optional .hbase.pb.CoprocessorServiceResult service_result = 4; */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult getServiceResult(); /** - * optional .hbase.pb.CoprocessorServiceResult service_result = 4; - * *
      * result if this was a coprocessor service call
      * 
+ * + * optional .hbase.pb.CoprocessorServiceResult service_result = 4; */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder getServiceResultOrBuilder(); - // optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; /** - * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; - * *
      * current load on the region
      * 
+ * + * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; */ @java.lang.Deprecated boolean hasLoadStats(); /** - * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; - * *
      * current load on the region
      * 
+ * + * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; */ @java.lang.Deprecated org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats getLoadStats(); /** - * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; - * *
      * current load on the region
      * 
+ * + * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; */ @java.lang.Deprecated org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder getLoadStatsOrBuilder(); } /** - * Protobuf type {@code hbase.pb.ResultOrException} - * *
    **
    * Either a Result or an Exception NameBytesPair (keyed by
    * exception name whose value is the exception stringified)
    * or maybe empty if no result and no exception.
    * 
+ * + * Protobuf type {@code hbase.pb.ResultOrException} */ - public static final class ResultOrException extends - com.google.protobuf.GeneratedMessage - implements ResultOrExceptionOrBuilder { + public static final class ResultOrException extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ResultOrException) + ResultOrExceptionOrBuilder { // Use ResultOrException.newBuilder() to construct. - private ResultOrException(com.google.protobuf.GeneratedMessage.Builder builder) { + private ResultOrException(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ResultOrException(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ResultOrException defaultInstance; - public static ResultOrException getDefaultInstance() { - return defaultInstance; } - - public ResultOrException getDefaultInstanceForType() { - return defaultInstance; + private ResultOrException() { + index_ = 0; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ResultOrException( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -34015,7 +34827,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); 
makeExtensionsImmutable(); @@ -34026,56 +34838,39 @@ public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ResultOrException_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ResultOrException_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ResultOrException parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ResultOrException(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional uint32 index = 1; public static final int INDEX_FIELD_NUMBER = 1; private int index_; /** - * optional uint32 index = 1; - * *
      * If part of a multi call, save original index of the list of all
      * passed so can align this response w/ original request.
      * 
+ * + * optional uint32 index = 1; */ public boolean hasIndex() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional uint32 index = 1; - * *
      * If part of a multi call, save original index of the list of all
      * passed so can align this response w/ original request.
      * 
+ * + * optional uint32 index = 1; */ public int getIndex() { return index_; } - // optional .hbase.pb.Result result = 2; public static final int RESULT_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result result_; /** @@ -34088,16 +34883,15 @@ public final class ClientProtos { * optional .hbase.pb.Result result = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result getResult() { - return result_; + return result_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance() : result_; } /** * optional .hbase.pb.Result result = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { - return result_; + return result_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance() : result_; } - // optional .hbase.pb.NameBytesPair exception = 3; public static final int EXCEPTION_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair exception_; /** @@ -34110,94 +34904,86 @@ public final class ClientProtos { * optional .hbase.pb.NameBytesPair exception = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getException() { - return exception_; + return exception_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : exception_; } /** * optional .hbase.pb.NameBytesPair exception = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() { - return exception_; + return exception_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : exception_; } - // optional .hbase.pb.CoprocessorServiceResult service_result = 4; public static final int SERVICE_RESULT_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult serviceResult_; /** - * optional .hbase.pb.CoprocessorServiceResult service_result = 4; - * *
      * result if this was a coprocessor service call
      * 
+ * + * optional .hbase.pb.CoprocessorServiceResult service_result = 4; */ public boolean hasServiceResult() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** - * optional .hbase.pb.CoprocessorServiceResult service_result = 4; - * *
      * result if this was a coprocessor service call
      * 
+ * + * optional .hbase.pb.CoprocessorServiceResult service_result = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult getServiceResult() { - return serviceResult_; + return serviceResult_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance() : serviceResult_; } /** - * optional .hbase.pb.CoprocessorServiceResult service_result = 4; - * *
      * result if this was a coprocessor service call
      * 
+ * + * optional .hbase.pb.CoprocessorServiceResult service_result = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder getServiceResultOrBuilder() { - return serviceResult_; + return serviceResult_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance() : serviceResult_; } - // optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; public static final int LOADSTATS_FIELD_NUMBER = 5; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats loadStats_; /** - * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; - * *
      * current load on the region
      * 
+ * + * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; */ @java.lang.Deprecated public boolean hasLoadStats() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** - * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; - * *
      * current load on the region
      * 
+ * + * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; */ @java.lang.Deprecated public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats getLoadStats() { - return loadStats_; + return loadStats_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance() : loadStats_; } /** - * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; - * *
      * current load on the region
      * 
+ * + * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; */ @java.lang.Deprecated public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder getLoadStatsOrBuilder() { - return loadStats_; + return loadStats_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance() : loadStats_; } - private void initFields() { - index_ = 0; - result_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance(); - exception_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); - serviceResult_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance(); - loadStats_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (hasException()) { if (!getException().isInitialized()) { @@ -34217,28 +35003,26 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt32(1, index_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, result_); + output.writeMessage(2, getResult()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeMessage(3, exception_); + output.writeMessage(3, getException()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeMessage(4, serviceResult_); + output.writeMessage(4, getServiceResult()); } if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeMessage(5, loadStats_); + output.writeMessage(5, 
getLoadStats()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -34248,33 +35032,27 @@ public final class ClientProtos { } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, result_); + .computeMessageSize(2, getResult()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, exception_); + .computeMessageSize(3, getException()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(4, serviceResult_); + .computeMessageSize(4, getServiceResult()); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(5, loadStats_); + .computeMessageSize(5, getLoadStats()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -34310,12 +35088,10 @@ public final class ClientProtos { result = result && getLoadStats() .equals(other.getLoadStats()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -34343,7 +35119,7 @@ public final class ClientProtos { hash = (37 * hash) + 
LOADSTATS_FIELD_NUMBER; hash = (53 * hash) + getLoadStats().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -34371,68 +35147,80 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.ResultOrException} - * *
      **
      * Either a Result or an Exception NameBytesPair (keyed by
      * exception name whose value is the exception stringified)
      * or maybe empty if no result and no exception.
      * 
+ * + * Protobuf type {@code hbase.pb.ResultOrException} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ResultOrException) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ResultOrException_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ResultOrException_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -34445,46 +35233,43 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getResultFieldBuilder(); getExceptionFieldBuilder(); getServiceResultFieldBuilder(); getLoadStatsFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); index_ = 0; bitField0_ = (bitField0_ & ~0x00000001); if (resultBuilder_ == null) { - result_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance(); + result_ = null; } else { resultBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); if 
(exceptionBuilder_ == null) { - exception_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + exception_ = null; } else { exceptionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); if (serviceResultBuilder_ == null) { - serviceResult_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance(); + serviceResult_ = null; } else { serviceResultBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000008); if (loadStatsBuilder_ == null) { - loadStats_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance(); + loadStats_ = null; } else { loadStatsBuilder_.clear(); } @@ -34492,10 +35277,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ResultOrException_descriptor; @@ -34558,6 +35339,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, 
value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException)other); @@ -34584,20 +35391,19 @@ public final class ClientProtos { if (other.hasLoadStats()) { mergeLoadStats(other.getLoadStats()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (hasException()) { if (!getException().isInitialized()) { - return false; } } if (hasServiceResult()) { if (!getServiceResult().isInitialized()) { - return false; } } @@ -34613,7 +35419,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -34623,37 +35429,36 @@ public final class ClientProtos { } private int bitField0_; - // optional uint32 index = 1; private int index_ ; /** - * optional uint32 index = 1; - * *
        * If part of a multi call, save original index of the list of all
        * passed so can align this response w/ original request.
        * 
+ * + * optional uint32 index = 1; */ public boolean hasIndex() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional uint32 index = 1; - * *
        * If part of a multi call, save original index of the list of all
        * passed so can align this response w/ original request.
        * 
+ * + * optional uint32 index = 1; */ public int getIndex() { return index_; } /** - * optional uint32 index = 1; - * *
        * If part of a multi call, save original index of the list of all
        * passed so can align this response w/ original request.
        * 
+ * + * optional uint32 index = 1; */ public Builder setIndex(int value) { bitField0_ |= 0x00000001; @@ -34662,12 +35467,12 @@ public final class ClientProtos { return this; } /** - * optional uint32 index = 1; - * *
        * If part of a multi call, save original index of the list of all
        * passed so can align this response w/ original request.
        * 
+ * + * optional uint32 index = 1; */ public Builder clearIndex() { bitField0_ = (bitField0_ & ~0x00000001); @@ -34676,9 +35481,8 @@ public final class ClientProtos { return this; } - // optional .hbase.pb.Result result = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result result_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result result_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_; /** * optional .hbase.pb.Result result = 2; @@ -34691,7 +35495,7 @@ public final class ClientProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result getResult() { if (resultBuilder_ == null) { - return result_; + return result_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance() : result_; } else { return resultBuilder_.getMessage(); } @@ -34732,6 +35536,7 @@ public final class ClientProtos { public Builder mergeResult(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result value) { if (resultBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + result_ != null && result_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance()) { result_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial(); @@ -34750,7 +35555,7 @@ public final class ClientProtos { */ public Builder clearResult() { if (resultBuilder_ == null) { - result_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance(); + result_ = null; onChanged(); } else { resultBuilder_.clear(); @@ -34773,19 +35578,20 @@ public final class ClientProtos { if (resultBuilder_ != null) { return resultBuilder_.getMessageOrBuilder(); } else { - return result_; + return result_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance() : result_; } } /** * optional .hbase.pb.Result result = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder> getResultFieldBuilder() { if (resultBuilder_ == null) { - resultBuilder_ = new com.google.protobuf.SingleFieldBuilder< + resultBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder>( - result_, + getResult(), getParentForChildren(), isClean()); result_ = null; @@ -34793,9 +35599,8 @@ public final class ClientProtos { return resultBuilder_; } - // optional .hbase.pb.NameBytesPair exception = 3; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair exception_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair exception_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> exceptionBuilder_; /** * optional .hbase.pb.NameBytesPair exception = 3; @@ -34808,7 +35613,7 @@ public final class ClientProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getException() 
{ if (exceptionBuilder_ == null) { - return exception_; + return exception_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : exception_; } else { return exceptionBuilder_.getMessage(); } @@ -34849,6 +35654,7 @@ public final class ClientProtos { public Builder mergeException(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value) { if (exceptionBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004) && + exception_ != null && exception_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { exception_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(exception_).mergeFrom(value).buildPartial(); @@ -34867,7 +35673,7 @@ public final class ClientProtos { */ public Builder clearException() { if (exceptionBuilder_ == null) { - exception_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + exception_ = null; onChanged(); } else { exceptionBuilder_.clear(); @@ -34890,19 +35696,20 @@ public final class ClientProtos { if (exceptionBuilder_ != null) { return exceptionBuilder_.getMessageOrBuilder(); } else { - return exception_; + return exception_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : exception_; } } /** * optional .hbase.pb.NameBytesPair exception = 3; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getExceptionFieldBuilder() { if (exceptionBuilder_ == null) { - exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( - exception_, + getException(), getParentForChildren(), isClean()); exception_ = null; @@ -34910,40 +35717,39 @@ public final class ClientProtos { return exceptionBuilder_; } - // optional .hbase.pb.CoprocessorServiceResult service_result = 4; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult serviceResult_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult serviceResult_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder> serviceResultBuilder_; /** - * optional 
.hbase.pb.CoprocessorServiceResult service_result = 4; - * *
        * result if this was a coprocessor service call
        * 
+ * + * optional .hbase.pb.CoprocessorServiceResult service_result = 4; */ public boolean hasServiceResult() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** - * optional .hbase.pb.CoprocessorServiceResult service_result = 4; - * *
        * result if this was a coprocessor service call
        * 
+ * + * optional .hbase.pb.CoprocessorServiceResult service_result = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult getServiceResult() { if (serviceResultBuilder_ == null) { - return serviceResult_; + return serviceResult_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance() : serviceResult_; } else { return serviceResultBuilder_.getMessage(); } } /** - * optional .hbase.pb.CoprocessorServiceResult service_result = 4; - * *
        * result if this was a coprocessor service call
        * 
+ * + * optional .hbase.pb.CoprocessorServiceResult service_result = 4; */ public Builder setServiceResult(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult value) { if (serviceResultBuilder_ == null) { @@ -34959,11 +35765,11 @@ public final class ClientProtos { return this; } /** - * optional .hbase.pb.CoprocessorServiceResult service_result = 4; - * *
        * result if this was a coprocessor service call
        * 
+ * + * optional .hbase.pb.CoprocessorServiceResult service_result = 4; */ public Builder setServiceResult( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder builderForValue) { @@ -34977,15 +35783,16 @@ public final class ClientProtos { return this; } /** - * optional .hbase.pb.CoprocessorServiceResult service_result = 4; - * *
        * result if this was a coprocessor service call
        * 
+ * + * optional .hbase.pb.CoprocessorServiceResult service_result = 4; */ public Builder mergeServiceResult(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult value) { if (serviceResultBuilder_ == null) { if (((bitField0_ & 0x00000008) == 0x00000008) && + serviceResult_ != null && serviceResult_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance()) { serviceResult_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.newBuilder(serviceResult_).mergeFrom(value).buildPartial(); @@ -35000,15 +35807,15 @@ public final class ClientProtos { return this; } /** - * optional .hbase.pb.CoprocessorServiceResult service_result = 4; - * *
        * result if this was a coprocessor service call
        * 
+ * + * optional .hbase.pb.CoprocessorServiceResult service_result = 4; */ public Builder clearServiceResult() { if (serviceResultBuilder_ == null) { - serviceResult_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance(); + serviceResult_ = null; onChanged(); } else { serviceResultBuilder_.clear(); @@ -35017,11 +35824,11 @@ public final class ClientProtos { return this; } /** - * optional .hbase.pb.CoprocessorServiceResult service_result = 4; - * *
        * result if this was a coprocessor service call
        * 
+ * + * optional .hbase.pb.CoprocessorServiceResult service_result = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder getServiceResultBuilder() { bitField0_ |= 0x00000008; @@ -35029,33 +35836,34 @@ public final class ClientProtos { return getServiceResultFieldBuilder().getBuilder(); } /** - * optional .hbase.pb.CoprocessorServiceResult service_result = 4; - * *
        * result if this was a coprocessor service call
        * 
+ * + * optional .hbase.pb.CoprocessorServiceResult service_result = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder getServiceResultOrBuilder() { if (serviceResultBuilder_ != null) { return serviceResultBuilder_.getMessageOrBuilder(); } else { - return serviceResult_; + return serviceResult_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance() : serviceResult_; } } /** - * optional .hbase.pb.CoprocessorServiceResult service_result = 4; - * *
        * result if this was a coprocessor service call
        * 
+ * + * optional .hbase.pb.CoprocessorServiceResult service_result = 4; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder> getServiceResultFieldBuilder() { if (serviceResultBuilder_ == null) { - serviceResultBuilder_ = new com.google.protobuf.SingleFieldBuilder< + serviceResultBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder>( - serviceResult_, + getServiceResult(), getParentForChildren(), isClean()); serviceResult_ = null; @@ -35063,40 +35871,39 @@ public final class ClientProtos { return serviceResultBuilder_; } - // optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats loadStats_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats loadStats_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder> loadStatsBuilder_; /** - * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; - * *
        * current load on the region
        * 
+ * + * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; */ @java.lang.Deprecated public boolean hasLoadStats() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** - * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; - * *
        * current load on the region
        * 
+ * + * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; */ @java.lang.Deprecated public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats getLoadStats() { if (loadStatsBuilder_ == null) { - return loadStats_; + return loadStats_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance() : loadStats_; } else { return loadStatsBuilder_.getMessage(); } } /** - * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; - * *
        * current load on the region
        * 
+ * + * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; */ @java.lang.Deprecated public Builder setLoadStats(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats value) { if (loadStatsBuilder_ == null) { @@ -35112,11 +35919,11 @@ public final class ClientProtos { return this; } /** - * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; - * *
        * current load on the region
        * 
+ * + * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; */ @java.lang.Deprecated public Builder setLoadStats( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.Builder builderForValue) { @@ -35130,15 +35937,16 @@ public final class ClientProtos { return this; } /** - * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; - * *
        * current load on the region
        * 
+ * + * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; */ @java.lang.Deprecated public Builder mergeLoadStats(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats value) { if (loadStatsBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010) && + loadStats_ != null && loadStats_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance()) { loadStats_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.newBuilder(loadStats_).mergeFrom(value).buildPartial(); @@ -35153,15 +35961,15 @@ public final class ClientProtos { return this; } /** - * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; - * *
        * current load on the region
        * 
+ * + * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; */ @java.lang.Deprecated public Builder clearLoadStats() { if (loadStatsBuilder_ == null) { - loadStats_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance(); + loadStats_ = null; onChanged(); } else { loadStatsBuilder_.clear(); @@ -35170,11 +35978,11 @@ public final class ClientProtos { return this; } /** - * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; - * *
        * current load on the region
        * 
+ * + * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; */ @java.lang.Deprecated public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.Builder getLoadStatsBuilder() { bitField0_ |= 0x00000010; @@ -35182,55 +35990,93 @@ public final class ClientProtos { return getLoadStatsFieldBuilder().getBuilder(); } /** - * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; - * *
        * current load on the region
        * 
+ * + * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; */ @java.lang.Deprecated public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder getLoadStatsOrBuilder() { if (loadStatsBuilder_ != null) { return loadStatsBuilder_.getMessageOrBuilder(); } else { - return loadStats_; + return loadStats_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance() : loadStats_; } } /** - * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; - * *
        * current load on the region
        * 
+ * + * optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true]; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder> getLoadStatsFieldBuilder() { if (loadStatsBuilder_ == null) { - loadStatsBuilder_ = new com.google.protobuf.SingleFieldBuilder< + loadStatsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder>( - loadStats_, + getLoadStats(), getParentForChildren(), isClean()); loadStats_ = null; } return loadStatsBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ResultOrException) } + // @@protoc_insertion_point(class_scope:hbase.pb.ResultOrException) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException DEFAULT_INSTANCE; static { - defaultInstance = new ResultOrException(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final 
com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ResultOrException parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ResultOrException(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ResultOrException) } - public interface RegionActionResultOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RegionActionResultOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RegionActionResult) + com.google.protobuf.MessageOrBuilder { - // repeated .hbase.pb.ResultOrException resultOrException = 1; /** * repeated .hbase.pb.ResultOrException resultOrException = 1; */ @@ -35255,70 +36101,61 @@ public final class ClientProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder getResultOrExceptionOrBuilder( int index); - // optional .hbase.pb.NameBytesPair exception = 2; /** - * optional .hbase.pb.NameBytesPair exception = 2; - * *
      * If the operation failed globally for this region, this exception is set
      * 
+ * + * optional .hbase.pb.NameBytesPair exception = 2; */ boolean hasException(); /** - * optional .hbase.pb.NameBytesPair exception = 2; - * *
      * If the operation failed globally for this region, this exception is set
      * 
+ * + * optional .hbase.pb.NameBytesPair exception = 2; */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getException(); /** - * optional .hbase.pb.NameBytesPair exception = 2; - * *
      * If the operation failed globally for this region, this exception is set
      * 
+ * + * optional .hbase.pb.NameBytesPair exception = 2; */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder(); } /** - * Protobuf type {@code hbase.pb.RegionActionResult} - * *
    **
    * The result of a RegionAction.
    * 
+ * + * Protobuf type {@code hbase.pb.RegionActionResult} */ - public static final class RegionActionResult extends - com.google.protobuf.GeneratedMessage - implements RegionActionResultOrBuilder { + public static final class RegionActionResult extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RegionActionResult) + RegionActionResultOrBuilder { // Use RegionActionResult.newBuilder() to construct. - private RegionActionResult(com.google.protobuf.GeneratedMessage.Builder builder) { + private RegionActionResult(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private RegionActionResult(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RegionActionResult defaultInstance; - public static RegionActionResult getDefaultInstance() { - return defaultInstance; - } - - public RegionActionResult getDefaultInstanceForType() { - return defaultInstance; + private RegionActionResult() { + resultOrException_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RegionActionResult( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -35342,7 +36179,8 @@ public final class ClientProtos { resultOrException_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } - 
resultOrException_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException.PARSER, extensionRegistry)); + resultOrException_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException.PARSER, extensionRegistry)); break; } case 18: { @@ -35364,7 +36202,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { resultOrException_ = java.util.Collections.unmodifiableList(resultOrException_); @@ -35378,30 +36216,14 @@ public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionActionResult_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionActionResult_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RegionActionResult parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RegionActionResult(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // repeated 
.hbase.pb.ResultOrException resultOrException = 1; public static final int RESULTOREXCEPTION_FIELD_NUMBER = 1; private java.util.List resultOrException_; /** @@ -35437,48 +36259,44 @@ public final class ClientProtos { return resultOrException_.get(index); } - // optional .hbase.pb.NameBytesPair exception = 2; public static final int EXCEPTION_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair exception_; /** - * optional .hbase.pb.NameBytesPair exception = 2; - * *
      * If the operation failed globally for this region, this exception is set
      * 
+ * + * optional .hbase.pb.NameBytesPair exception = 2; */ public boolean hasException() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional .hbase.pb.NameBytesPair exception = 2; - * *
      * If the operation failed globally for this region, this exception is set
      * 
+ * + * optional .hbase.pb.NameBytesPair exception = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getException() { - return exception_; + return exception_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : exception_; } /** - * optional .hbase.pb.NameBytesPair exception = 2; - * *
      * If the operation failed globally for this region, this exception is set
      * 
+ * + * optional .hbase.pb.NameBytesPair exception = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() { - return exception_; + return exception_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : exception_; } - private void initFields() { - resultOrException_ = java.util.Collections.emptyList(); - exception_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; for (int i = 0; i < getResultOrExceptionCount(); i++) { if (!getResultOrException(i).isInitialized()) { @@ -35498,19 +36316,17 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < resultOrException_.size(); i++) { output.writeMessage(1, resultOrException_.get(i)); } if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(2, exception_); + output.writeMessage(2, getException()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -35520,21 +36336,15 @@ public final class ClientProtos { } if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, exception_); + .computeMessageSize(2, getException()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static 
final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -35552,12 +36362,10 @@ public final class ClientProtos { result = result && getException() .equals(other.getException()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -35573,7 +36381,7 @@ public final class ClientProtos { hash = (37 * hash) + EXCEPTION_FIELD_NUMBER; hash = (53 * hash) + getException().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -35601,66 +36409,78 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + 
.parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.RegionActionResult} - * *
      **
      * The result of a RegionAction.
      * 
+ * + * Protobuf type {@code hbase.pb.RegionActionResult} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResultOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RegionActionResult) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResultOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionActionResult_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionActionResult_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -35673,20 +36493,17 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getResultOrExceptionFieldBuilder(); getExceptionFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (resultOrExceptionBuilder_ == null) { @@ -35696,7 +36513,7 @@ public final class ClientProtos { resultOrExceptionBuilder_.clear(); } if (exceptionBuilder_ == null) { - exception_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + exception_ = null; } else { 
exceptionBuilder_.clear(); } @@ -35704,10 +36521,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionActionResult_descriptor; @@ -35751,6 +36564,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult)other); @@ -35781,7 +36620,7 @@ public final class ClientProtos { resultOrException_ = other.resultOrException_; bitField0_ = (bitField0_ & ~0x00000001); resultOrExceptionBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getResultOrExceptionFieldBuilder() : null; } else { resultOrExceptionBuilder_.addAllMessages(other.resultOrException_); @@ -35791,20 +36630,19 @@ public final class ClientProtos { if (other.hasException()) { mergeException(other.getException()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getResultOrExceptionCount(); i++) { if (!getResultOrException(i).isInitialized()) { - return false; } } if (hasException()) { if (!getException().isInitialized()) { - return false; } } @@ -35820,7 +36658,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -35830,7 +36668,6 @@ public final class ClientProtos { } private int bitField0_; - // repeated .hbase.pb.ResultOrException resultOrException = 1; private java.util.List resultOrException_ = java.util.Collections.emptyList(); private void ensureResultOrExceptionIsMutable() { @@ -35840,7 +36677,7 @@ public final class ClientProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder> resultOrExceptionBuilder_; /** @@ -35972,7 +36809,8 @@ public final class ClientProtos { java.lang.Iterable values) { if (resultOrExceptionBuilder_ == null) { ensureResultOrExceptionIsMutable(); - super.addAll(values, resultOrException_); + 
com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, resultOrException_); onChanged(); } else { resultOrExceptionBuilder_.addAllMessages(values); @@ -36055,11 +36893,11 @@ public final class ClientProtos { getResultOrExceptionBuilderList() { return getResultOrExceptionFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder> getResultOrExceptionFieldBuilder() { if (resultOrExceptionBuilder_ == null) { - resultOrExceptionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + resultOrExceptionBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder>( resultOrException_, ((bitField0_ & 0x00000001) == 0x00000001), @@ -36070,40 +36908,39 @@ public final class ClientProtos { return resultOrExceptionBuilder_; } - // optional .hbase.pb.NameBytesPair exception = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair exception_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair exception_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> exceptionBuilder_; /** - * optional .hbase.pb.NameBytesPair exception = 2; - * *
        * If the operation failed globally for this region, this exception is set
        * 
+ * + * optional .hbase.pb.NameBytesPair exception = 2; */ public boolean hasException() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional .hbase.pb.NameBytesPair exception = 2; - * *
        * If the operation failed globally for this region, this exception is set
        * 
+ * + * optional .hbase.pb.NameBytesPair exception = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getException() { if (exceptionBuilder_ == null) { - return exception_; + return exception_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : exception_; } else { return exceptionBuilder_.getMessage(); } } /** - * optional .hbase.pb.NameBytesPair exception = 2; - * *
        * If the operation failed globally for this region, this exception is set
        * 
+ * + * optional .hbase.pb.NameBytesPair exception = 2; */ public Builder setException(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value) { if (exceptionBuilder_ == null) { @@ -36119,11 +36956,11 @@ public final class ClientProtos { return this; } /** - * optional .hbase.pb.NameBytesPair exception = 2; - * *
        * If the operation failed globally for this region, this exception is set
        * 
+ * + * optional .hbase.pb.NameBytesPair exception = 2; */ public Builder setException( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { @@ -36137,15 +36974,16 @@ public final class ClientProtos { return this; } /** - * optional .hbase.pb.NameBytesPair exception = 2; - * *
        * If the operation failed globally for this region, this exception is set
        * 
+ * + * optional .hbase.pb.NameBytesPair exception = 2; */ public Builder mergeException(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value) { if (exceptionBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + exception_ != null && exception_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { exception_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(exception_).mergeFrom(value).buildPartial(); @@ -36160,15 +36998,15 @@ public final class ClientProtos { return this; } /** - * optional .hbase.pb.NameBytesPair exception = 2; - * *
        * If the operation failed globally for this region, this exception is set
        * 
+ * + * optional .hbase.pb.NameBytesPair exception = 2; */ public Builder clearException() { if (exceptionBuilder_ == null) { - exception_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + exception_ = null; onChanged(); } else { exceptionBuilder_.clear(); @@ -36177,11 +37015,11 @@ public final class ClientProtos { return this; } /** - * optional .hbase.pb.NameBytesPair exception = 2; - * *
        * If the operation failed globally for this region, this exception is set
        * 
+ * + * optional .hbase.pb.NameBytesPair exception = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder getExceptionBuilder() { bitField0_ |= 0x00000002; @@ -36189,55 +37027,93 @@ public final class ClientProtos { return getExceptionFieldBuilder().getBuilder(); } /** - * optional .hbase.pb.NameBytesPair exception = 2; - * *
        * If the operation failed globally for this region, this exception is set
        * 
+ * + * optional .hbase.pb.NameBytesPair exception = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() { if (exceptionBuilder_ != null) { return exceptionBuilder_.getMessageOrBuilder(); } else { - return exception_; + return exception_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : exception_; } } /** - * optional .hbase.pb.NameBytesPair exception = 2; - * *
        * If the operation failed globally for this region, this exception is set
        * 
+ * + * optional .hbase.pb.NameBytesPair exception = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getExceptionFieldBuilder() { if (exceptionBuilder_ == null) { - exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( - exception_, + getException(), getParentForChildren(), isClean()); exception_ = null; } return exceptionBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RegionActionResult) } + // @@protoc_insertion_point(class_scope:hbase.pb.RegionActionResult) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult DEFAULT_INSTANCE; static { - defaultInstance = new RegionActionResult(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = 
new com.google.protobuf.AbstractParser() { + public RegionActionResult parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionActionResult(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.RegionActionResult) } - public interface MultiRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface MultiRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.MultiRequest) + com.google.protobuf.MessageOrBuilder { - // repeated .hbase.pb.RegionAction regionAction = 1; /** * repeated .hbase.pb.RegionAction regionAction = 1; */ @@ -36262,7 +37138,6 @@ public final class ClientProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionOrBuilder getRegionActionOrBuilder( int index); - // optional uint64 nonceGroup = 2; /** * optional uint64 nonceGroup = 2; */ @@ -36272,7 +37147,6 @@ public final class ClientProtos { */ long getNonceGroup(); - // optional .hbase.pb.Condition condition = 3; /** * optional .hbase.pb.Condition condition = 3; */ @@ -36287,8 +37161,6 @@ public final class ClientProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder(); } /** - * Protobuf type {@code hbase.pb.MultiRequest} - * *
    **
    * Execute a list of actions on a given region in order.
@@ -36297,37 +37169,32 @@ public final class ClientProtos {
    *  done by the region specifier but by keeping the order of the RegionActionResult vs.
    *  the order of the RegionAction.
    * 
+ * + * Protobuf type {@code hbase.pb.MultiRequest} */ - public static final class MultiRequest extends - com.google.protobuf.GeneratedMessage - implements MultiRequestOrBuilder { + public static final class MultiRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.MultiRequest) + MultiRequestOrBuilder { // Use MultiRequest.newBuilder() to construct. - private MultiRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private MultiRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private MultiRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final MultiRequest defaultInstance; - public static MultiRequest getDefaultInstance() { - return defaultInstance; } - - public MultiRequest getDefaultInstanceForType() { - return defaultInstance; + private MultiRequest() { + regionAction_ = java.util.Collections.emptyList(); + nonceGroup_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private MultiRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -36351,7 +37218,8 @@ public final class ClientProtos { regionAction_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } - regionAction_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction.PARSER, extensionRegistry)); + regionAction_.add( + 
input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction.PARSER, extensionRegistry)); break; } case 16: { @@ -36378,7 +37246,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { regionAction_ = java.util.Collections.unmodifiableList(regionAction_); @@ -36392,30 +37260,14 @@ public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public MultiRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new MultiRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // repeated .hbase.pb.RegionAction regionAction = 1; public static final int REGIONACTION_FIELD_NUMBER = 1; private java.util.List regionAction_; /** @@ -36451,7 +37303,6 @@ public final class ClientProtos { return regionAction_.get(index); } - // 
optional uint64 nonceGroup = 2; public static final int NONCEGROUP_FIELD_NUMBER = 2; private long nonceGroup_; /** @@ -36467,7 +37318,6 @@ public final class ClientProtos { return nonceGroup_; } - // optional .hbase.pb.Condition condition = 3; public static final int CONDITION_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition condition_; /** @@ -36480,24 +37330,20 @@ public final class ClientProtos { * optional .hbase.pb.Condition condition = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition getCondition() { - return condition_; + return condition_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance() : condition_; } /** * optional .hbase.pb.Condition condition = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() { - return condition_; + return condition_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance() : condition_; } - private void initFields() { - regionAction_ = java.util.Collections.emptyList(); - nonceGroup_ = 0L; - condition_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; for (int i = 0; i < getRegionActionCount(); i++) { if (!getRegionAction(i).isInitialized()) { @@ -36517,7 +37363,6 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < regionAction_.size(); i++) { output.writeMessage(1, regionAction_.get(i)); } @@ -36525,14 +37370,13 @@ public final class ClientProtos { 
output.writeUInt64(2, nonceGroup_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(3, condition_); + output.writeMessage(3, getCondition()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -36546,21 +37390,15 @@ public final class ClientProtos { } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, condition_); + .computeMessageSize(3, getCondition()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -36583,12 +37421,10 @@ public final class ClientProtos { result = result && getCondition() .equals(other.getCondition()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -36602,13 +37438,14 @@ public final class ClientProtos { } if (hasNonceGroup()) { hash = (37 * hash) + NONCEGROUP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonceGroup()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonceGroup()); } if (hasCondition()) { hash = (37 * hash) + CONDITION_FIELD_NUMBER; hash = (53 * hash) + getCondition().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + 
unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -36636,52 +37473,61 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.MultiRequest} - * *
      **
      * Execute a list of actions on a given region in order.
@@ -36690,16 +37536,19 @@ public final class ClientProtos {
      *  done by the region specifier but by keeping the order of the RegionActionResult vs.
      *  the order of the RegionAction.
      * 
+ * + * Protobuf type {@code hbase.pb.MultiRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.MultiRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -36712,20 +37561,17 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionActionFieldBuilder(); getConditionFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (regionActionBuilder_ == null) { @@ -36737,7 +37583,7 @@ public final class ClientProtos { nonceGroup_ = 0L; bitField0_ = (bitField0_ & ~0x00000002); if (conditionBuilder_ == null) { - condition_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); + condition_ = null; } else { conditionBuilder_.clear(); } @@ -36745,10 +37591,6 
@@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRequest_descriptor; @@ -36796,6 +37638,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest)other); @@ -36826,7 +37694,7 @@ public final class ClientProtos { regionAction_ = other.regionAction_; bitField0_ = (bitField0_ & ~0x00000001); regionActionBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getRegionActionFieldBuilder() : null; } else { regionActionBuilder_.addAllMessages(other.regionAction_); @@ -36839,20 +37707,19 @@ public final class ClientProtos { if (other.hasCondition()) { mergeCondition(other.getCondition()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getRegionActionCount(); i++) { if (!getRegionAction(i).isInitialized()) { - return false; } } if (hasCondition()) { if (!getCondition().isInitialized()) { - return false; } } @@ -36868,7 +37735,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -36878,7 +37745,6 @@ public final class ClientProtos { } private int bitField0_; - // repeated .hbase.pb.RegionAction regionAction = 1; private java.util.List regionAction_ = java.util.Collections.emptyList(); private void ensureRegionActionIsMutable() { @@ -36888,7 +37754,7 @@ public final class ClientProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionOrBuilder> regionActionBuilder_; /** @@ -37020,7 +37886,8 @@ public final class ClientProtos { java.lang.Iterable values) { if (regionActionBuilder_ == null) { ensureRegionActionIsMutable(); - super.addAll(values, regionAction_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, regionAction_); 
onChanged(); } else { regionActionBuilder_.addAllMessages(values); @@ -37103,11 +37970,11 @@ public final class ClientProtos { getRegionActionBuilderList() { return getRegionActionFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionOrBuilder> getRegionActionFieldBuilder() { if (regionActionBuilder_ == null) { - regionActionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + regionActionBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionOrBuilder>( regionAction_, ((bitField0_ & 0x00000001) == 0x00000001), @@ -37118,7 +37985,6 @@ public final class ClientProtos { return regionActionBuilder_; } - // optional uint64 nonceGroup = 2; private long nonceGroup_ ; /** * optional uint64 nonceGroup = 2; @@ -37151,9 +38017,8 @@ public final class ClientProtos { return this; } - // optional .hbase.pb.Condition condition = 3; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition condition_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition condition_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ConditionOrBuilder> conditionBuilder_; /** * optional .hbase.pb.Condition condition = 3; @@ -37166,7 +38031,7 @@ public final class ClientProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition getCondition() { if (conditionBuilder_ == null) { - return condition_; + return condition_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance() : condition_; } else { return conditionBuilder_.getMessage(); } @@ -37207,6 +38072,7 @@ public final class ClientProtos { public Builder mergeCondition(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition value) { if (conditionBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004) && + condition_ != null && condition_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance()) { condition_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.newBuilder(condition_).mergeFrom(value).buildPartial(); @@ -37225,7 +38091,7 @@ public final class ClientProtos { */ public Builder clearCondition() { if (conditionBuilder_ == null) { - condition_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); + condition_ = null; onChanged(); } else { conditionBuilder_.clear(); @@ -37248,41 +38114,79 @@ public final class ClientProtos { if (conditionBuilder_ != null) { return conditionBuilder_.getMessageOrBuilder(); } else { - return condition_; + return condition_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance() : condition_; } } /** * optional .hbase.pb.Condition condition = 3; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ConditionOrBuilder> getConditionFieldBuilder() { if (conditionBuilder_ == null) { - conditionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + conditionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ConditionOrBuilder>( - condition_, + getCondition(), getParentForChildren(), isClean()); condition_ = null; } return conditionBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.MultiRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.MultiRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest DEFAULT_INSTANCE; static { - defaultInstance = new MultiRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public 
static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public MultiRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MultiRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.MultiRequest) } - public interface MultiResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface MultiResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.MultiResponse) + com.google.protobuf.MessageOrBuilder { - // repeated .hbase.pb.RegionActionResult regionActionResult = 1; /** * repeated .hbase.pb.RegionActionResult regionActionResult = 1; */ @@ -37307,25 +38211,23 @@ public final class ClientProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResultOrBuilder getRegionActionResultOrBuilder( int index); - // optional bool processed = 2; /** - * optional bool processed = 2; - * *
      * used for mutate to indicate processed only
      * 
+ * + * optional bool processed = 2; */ boolean hasProcessed(); /** - * optional bool processed = 2; - * *
      * used for mutate to indicate processed only
      * 
+ * + * optional bool processed = 2; */ boolean getProcessed(); - // optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3; /** * optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3; */ @@ -37342,36 +38244,29 @@ public final class ClientProtos { /** * Protobuf type {@code hbase.pb.MultiResponse} */ - public static final class MultiResponse extends - com.google.protobuf.GeneratedMessage - implements MultiResponseOrBuilder { + public static final class MultiResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.MultiResponse) + MultiResponseOrBuilder { // Use MultiResponse.newBuilder() to construct. - private MultiResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private MultiResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private MultiResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final MultiResponse defaultInstance; - public static MultiResponse getDefaultInstance() { - return defaultInstance; } - - public MultiResponse getDefaultInstanceForType() { - return defaultInstance; + private MultiResponse() { + regionActionResult_ = java.util.Collections.emptyList(); + processed_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private MultiResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -37395,7 +38290,8 @@ public final class ClientProtos { 
regionActionResult_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } - regionActionResult_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult.PARSER, extensionRegistry)); + regionActionResult_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult.PARSER, extensionRegistry)); break; } case 16: { @@ -37422,7 +38318,7 @@ public final class ClientProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { regionActionResult_ = java.util.Collections.unmodifiableList(regionActionResult_); @@ -37436,30 +38332,14 @@ public final class ClientProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public MultiResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new MultiResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return 
PARSER; - } - private int bitField0_; - // repeated .hbase.pb.RegionActionResult regionActionResult = 1; public static final int REGIONACTIONRESULT_FIELD_NUMBER = 1; private java.util.List regionActionResult_; /** @@ -37495,31 +38375,29 @@ public final class ClientProtos { return regionActionResult_.get(index); } - // optional bool processed = 2; public static final int PROCESSED_FIELD_NUMBER = 2; private boolean processed_; /** - * optional bool processed = 2; - * *
      * used for mutate to indicate processed only
      * 
+ * + * optional bool processed = 2; */ public boolean hasProcessed() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional bool processed = 2; - * *
      * used for mutate to indicate processed only
      * 
+ * + * optional bool processed = 2; */ public boolean getProcessed() { return processed_; } - // optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3; public static final int REGIONSTATISTICS_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats regionStatistics_; /** @@ -37532,24 +38410,20 @@ public final class ClientProtos { * optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats getRegionStatistics() { - return regionStatistics_; + return regionStatistics_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.getDefaultInstance() : regionStatistics_; } /** * optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStatsOrBuilder getRegionStatisticsOrBuilder() { - return regionStatistics_; + return regionStatistics_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.getDefaultInstance() : regionStatistics_; } - private void initFields() { - regionActionResult_ = java.util.Collections.emptyList(); - processed_ = false; - regionStatistics_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; for (int i = 0; i < getRegionActionResultCount(); i++) { if (!getRegionActionResult(i).isInitialized()) { @@ -37569,7 +38443,6 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < regionActionResult_.size(); i++) { output.writeMessage(1, regionActionResult_.get(i)); } @@ -37577,14 +38450,13 @@ public final class ClientProtos { output.writeBool(2, processed_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(3, regionStatistics_); + output.writeMessage(3, getRegionStatistics()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -37598,21 +38470,15 @@ public final class ClientProtos { } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, regionStatistics_); + .computeMessageSize(3, getRegionStatistics()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override 
- protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -37635,12 +38501,10 @@ public final class ClientProtos { result = result && getRegionStatistics() .equals(other.getRegionStatistics()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -37654,13 +38518,14 @@ public final class ClientProtos { } if (hasProcessed()) { hash = (37 * hash) + PROCESSED_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getProcessed()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getProcessed()); } if (hasRegionStatistics()) { hash = (37 * hash) + REGIONSTATISTICS_FIELD_NUMBER; hash = (53 * hash) + getRegionStatistics().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -37688,46 +38553,57 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse 
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -37735,14 +38611,15 @@ public final class ClientProtos { * Protobuf type {@code hbase.pb.MultiResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.MultiResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -37755,20 +38632,17 @@ public final class ClientProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionActionResultFieldBuilder(); getRegionStatisticsFieldBuilder(); } } - private static Builder create() { - return new Builder(); - 
} - public Builder clear() { super.clear(); if (regionActionResultBuilder_ == null) { @@ -37780,7 +38654,7 @@ public final class ClientProtos { processed_ = false; bitField0_ = (bitField0_ & ~0x00000002); if (regionStatisticsBuilder_ == null) { - regionStatistics_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.getDefaultInstance(); + regionStatistics_ = null; } else { regionStatisticsBuilder_.clear(); } @@ -37788,10 +38662,6 @@ public final class ClientProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiResponse_descriptor; @@ -37839,6 +38709,32 @@ public final class ClientProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse)other); @@ -37869,7 
+38765,7 @@ public final class ClientProtos { regionActionResult_ = other.regionActionResult_; bitField0_ = (bitField0_ & ~0x00000001); regionActionResultBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getRegionActionResultFieldBuilder() : null; } else { regionActionResultBuilder_.addAllMessages(other.regionActionResult_); @@ -37882,20 +38778,19 @@ public final class ClientProtos { if (other.hasRegionStatistics()) { mergeRegionStatistics(other.getRegionStatistics()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getRegionActionResultCount(); i++) { if (!getRegionActionResult(i).isInitialized()) { - return false; } } if (hasRegionStatistics()) { if (!getRegionStatistics().isInitialized()) { - return false; } } @@ -37911,7 +38806,7 @@ public final class ClientProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -37921,7 +38816,6 @@ public final class ClientProtos { } private int bitField0_; - // repeated .hbase.pb.RegionActionResult regionActionResult = 1; private java.util.List regionActionResult_ = java.util.Collections.emptyList(); private void ensureRegionActionResultIsMutable() { @@ -37931,7 +38825,7 @@ public final class ClientProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult, 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResultOrBuilder> regionActionResultBuilder_; /** @@ -38063,7 +38957,8 @@ public final class ClientProtos { java.lang.Iterable values) { if (regionActionResultBuilder_ == null) { ensureRegionActionResultIsMutable(); - super.addAll(values, regionActionResult_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, regionActionResult_); onChanged(); } else { regionActionResultBuilder_.addAllMessages(values); @@ -38146,11 +39041,11 @@ public final class ClientProtos { getRegionActionResultBuilderList() { return getRegionActionResultFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResultOrBuilder> getRegionActionResultFieldBuilder() { if (regionActionResultBuilder_ == null) { - regionActionResultBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + regionActionResultBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResultOrBuilder>( regionActionResult_, ((bitField0_ & 0x00000001) == 0x00000001), @@ -38161,34 +39056,33 @@ public final class ClientProtos { return regionActionResultBuilder_; } - // optional bool processed = 2; private boolean processed_ ; /** - * optional bool processed = 2; - * *
        * used for mutate to indicate processed only
        * 
+ * + * optional bool processed = 2; */ public boolean hasProcessed() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional bool processed = 2; - * *
        * used for mutate to indicate processed only
        * 
+ * + * optional bool processed = 2; */ public boolean getProcessed() { return processed_; } /** - * optional bool processed = 2; - * *
        * used for mutate to indicate processed only
        * 
+ * + * optional bool processed = 2; */ public Builder setProcessed(boolean value) { bitField0_ |= 0x00000002; @@ -38197,11 +39091,11 @@ public final class ClientProtos { return this; } /** - * optional bool processed = 2; - * *
        * used for mutate to indicate processed only
        * 
+ * + * optional bool processed = 2; */ public Builder clearProcessed() { bitField0_ = (bitField0_ & ~0x00000002); @@ -38210,9 +39104,8 @@ public final class ClientProtos { return this; } - // optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats regionStatistics_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats regionStatistics_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStatsOrBuilder> regionStatisticsBuilder_; /** * optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3; @@ -38225,7 +39118,7 @@ public final class ClientProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats getRegionStatistics() { if (regionStatisticsBuilder_ == null) { - return regionStatistics_; + return regionStatistics_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.getDefaultInstance() : regionStatistics_; } else { return regionStatisticsBuilder_.getMessage(); } @@ -38266,6 +39159,7 @@ public final class ClientProtos { public Builder mergeRegionStatistics(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats value) { if (regionStatisticsBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004) && + regionStatistics_ != null && regionStatistics_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.getDefaultInstance()) { regionStatistics_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.newBuilder(regionStatistics_).mergeFrom(value).buildPartial(); @@ -38284,7 +39178,7 @@ public final class ClientProtos { */ public Builder clearRegionStatistics() { if (regionStatisticsBuilder_ == null) { - regionStatistics_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.getDefaultInstance(); + regionStatistics_ = null; onChanged(); } else { regionStatisticsBuilder_.clear(); @@ -38307,35 +39201,73 @@ public final class ClientProtos { if (regionStatisticsBuilder_ != null) { return regionStatisticsBuilder_.getMessageOrBuilder(); } else { - return regionStatistics_; + return regionStatistics_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.getDefaultInstance() : regionStatistics_; } } /** * optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStatsOrBuilder> getRegionStatisticsFieldBuilder() { if (regionStatisticsBuilder_ == null) { - regionStatisticsBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionStatisticsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStatsOrBuilder>( - regionStatistics_, + getRegionStatistics(), getParentForChildren(), isClean()); regionStatistics_ = null; } return regionStatisticsBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.MultiResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.MultiResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse DEFAULT_INSTANCE; static { - defaultInstance = new MultiResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse(); + } + + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public MultiResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MultiResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.MultiResponse) } /** @@ -39138,192 +40070,192 @@ public final class ClientProtos { // @@protoc_insertion_point(class_scope:hbase.pb.ClientService) } - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_Authorizations_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_Authorizations_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_CellVisibility_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_CellVisibility_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor 
internal_static_hbase_pb_Column_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_Column_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_Get_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_Get_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_Result_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_Result_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_GetRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_GetRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_GetResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_GetResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_Condition_descriptor; - private static - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_Condition_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_MutationProto_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_MutationProto_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_MutationProto_ColumnValue_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_MutationProto_ColumnValue_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_MutateRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_MutateRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor 
internal_static_hbase_pb_MutateResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_MutateResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_Scan_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_Scan_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ScanRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ScanRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ScanResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ScanResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_BulkLoadHFileRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_BulkLoadHFileRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor 
internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_BulkLoadHFileResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_BulkLoadHFileResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_DelegationToken_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_DelegationToken_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_PrepareBulkLoadRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_PrepareBulkLoadResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + 
private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_CleanupBulkLoadRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_CleanupBulkLoadResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_CoprocessorServiceCall_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_CoprocessorServiceCall_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_CoprocessorServiceResult_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_CoprocessorServiceResult_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_CoprocessorServiceRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internal_static_hbase_pb_CoprocessorServiceRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_CoprocessorServiceResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_CoprocessorServiceResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_Action_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_Action_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RegionAction_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RegionAction_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RegionLoadStats_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RegionLoadStats_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_MultiRegionLoadStats_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_MultiRegionLoadStats_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ResultOrException_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ResultOrException_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RegionActionResult_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RegionActionResult_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_MultiRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_MultiRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_MultiResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_MultiResponse_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData 
= { @@ -39487,229 +40419,13 @@ public final class ClientProtos { "tobuf.generatedB\014ClientProtosH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_hbase_pb_Authorizations_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_hbase_pb_Authorizations_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_Authorizations_descriptor, - new java.lang.String[] { "Label", }); - internal_static_hbase_pb_CellVisibility_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_hbase_pb_CellVisibility_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_CellVisibility_descriptor, - new java.lang.String[] { "Expression", }); - internal_static_hbase_pb_Column_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_hbase_pb_Column_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_Column_descriptor, - new java.lang.String[] { "Family", "Qualifier", }); - internal_static_hbase_pb_Get_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_hbase_pb_Get_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_Get_descriptor, - new java.lang.String[] { "Row", "Column", "Attribute", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "StoreLimit", "StoreOffset", "ExistenceOnly", "Consistency", "CfTimeRange", }); - internal_static_hbase_pb_Result_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_hbase_pb_Result_fieldAccessorTable = new - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_Result_descriptor, - new java.lang.String[] { "Cell", "AssociatedCellCount", "Exists", "Stale", "Partial", }); - internal_static_hbase_pb_GetRequest_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_hbase_pb_GetRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_GetRequest_descriptor, - new java.lang.String[] { "Region", "Get", }); - internal_static_hbase_pb_GetResponse_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_hbase_pb_GetResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_GetResponse_descriptor, - new java.lang.String[] { "Result", }); - internal_static_hbase_pb_Condition_descriptor = - getDescriptor().getMessageTypes().get(7); - internal_static_hbase_pb_Condition_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_Condition_descriptor, - new java.lang.String[] { "Row", "Family", "Qualifier", "CompareType", "Comparator", }); - internal_static_hbase_pb_MutationProto_descriptor = - getDescriptor().getMessageTypes().get(8); - internal_static_hbase_pb_MutationProto_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_MutationProto_descriptor, - new java.lang.String[] { "Row", "MutateType", "ColumnValue", "Timestamp", "Attribute", "Durability", "TimeRange", "AssociatedCellCount", "Nonce", }); - internal_static_hbase_pb_MutationProto_ColumnValue_descriptor = - internal_static_hbase_pb_MutationProto_descriptor.getNestedTypes().get(0); - internal_static_hbase_pb_MutationProto_ColumnValue_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_MutationProto_ColumnValue_descriptor, - new java.lang.String[] { "Family", 
"QualifierValue", }); - internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_descriptor = - internal_static_hbase_pb_MutationProto_ColumnValue_descriptor.getNestedTypes().get(0); - internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_descriptor, - new java.lang.String[] { "Qualifier", "Value", "Timestamp", "DeleteType", "Tags", }); - internal_static_hbase_pb_MutateRequest_descriptor = - getDescriptor().getMessageTypes().get(9); - internal_static_hbase_pb_MutateRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_MutateRequest_descriptor, - new java.lang.String[] { "Region", "Mutation", "Condition", "NonceGroup", }); - internal_static_hbase_pb_MutateResponse_descriptor = - getDescriptor().getMessageTypes().get(10); - internal_static_hbase_pb_MutateResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_MutateResponse_descriptor, - new java.lang.String[] { "Result", "Processed", }); - internal_static_hbase_pb_Scan_descriptor = - getDescriptor().getMessageTypes().get(11); - internal_static_hbase_pb_Scan_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_Scan_descriptor, - new java.lang.String[] { "Column", "Attribute", "StartRow", "StopRow", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "BatchSize", "MaxResultSize", "StoreLimit", "StoreOffset", "LoadColumnFamiliesOnDemand", "Small", "Reversed", "Consistency", "Caching", "AllowPartialResults", "CfTimeRange", }); - internal_static_hbase_pb_ScanRequest_descriptor = - getDescriptor().getMessageTypes().get(12); - internal_static_hbase_pb_ScanRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - 
internal_static_hbase_pb_ScanRequest_descriptor, - new java.lang.String[] { "Region", "Scan", "ScannerId", "NumberOfRows", "CloseScanner", "NextCallSeq", "ClientHandlesPartials", "ClientHandlesHeartbeats", "TrackScanMetrics", "Renew", }); - internal_static_hbase_pb_ScanResponse_descriptor = - getDescriptor().getMessageTypes().get(13); - internal_static_hbase_pb_ScanResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ScanResponse_descriptor, - new java.lang.String[] { "CellsPerResult", "ScannerId", "MoreResults", "Ttl", "Results", "Stale", "PartialFlagPerResult", "MoreResultsInRegion", "HeartbeatMessage", "ScanMetrics", }); - internal_static_hbase_pb_BulkLoadHFileRequest_descriptor = - getDescriptor().getMessageTypes().get(14); - internal_static_hbase_pb_BulkLoadHFileRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_BulkLoadHFileRequest_descriptor, - new java.lang.String[] { "Region", "FamilyPath", "AssignSeqNum", "FsToken", "BulkToken", "CopyFile", }); - internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_descriptor = - internal_static_hbase_pb_BulkLoadHFileRequest_descriptor.getNestedTypes().get(0); - internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_descriptor, - new java.lang.String[] { "Family", "Path", }); - internal_static_hbase_pb_BulkLoadHFileResponse_descriptor = - getDescriptor().getMessageTypes().get(15); - internal_static_hbase_pb_BulkLoadHFileResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_BulkLoadHFileResponse_descriptor, - new java.lang.String[] { "Loaded", }); - internal_static_hbase_pb_DelegationToken_descriptor = - getDescriptor().getMessageTypes().get(16); - 
internal_static_hbase_pb_DelegationToken_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_DelegationToken_descriptor, - new java.lang.String[] { "Identifier", "Password", "Kind", "Service", }); - internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor = - getDescriptor().getMessageTypes().get(17); - internal_static_hbase_pb_PrepareBulkLoadRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor, - new java.lang.String[] { "TableName", "Region", }); - internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor = - getDescriptor().getMessageTypes().get(18); - internal_static_hbase_pb_PrepareBulkLoadResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor, - new java.lang.String[] { "BulkToken", }); - internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor = - getDescriptor().getMessageTypes().get(19); - internal_static_hbase_pb_CleanupBulkLoadRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor, - new java.lang.String[] { "BulkToken", "Region", }); - internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor = - getDescriptor().getMessageTypes().get(20); - internal_static_hbase_pb_CleanupBulkLoadResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_CoprocessorServiceCall_descriptor = - getDescriptor().getMessageTypes().get(21); - internal_static_hbase_pb_CoprocessorServiceCall_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_CoprocessorServiceCall_descriptor, - new java.lang.String[] { "Row", 
"ServiceName", "MethodName", "Request", }); - internal_static_hbase_pb_CoprocessorServiceResult_descriptor = - getDescriptor().getMessageTypes().get(22); - internal_static_hbase_pb_CoprocessorServiceResult_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_CoprocessorServiceResult_descriptor, - new java.lang.String[] { "Value", }); - internal_static_hbase_pb_CoprocessorServiceRequest_descriptor = - getDescriptor().getMessageTypes().get(23); - internal_static_hbase_pb_CoprocessorServiceRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_CoprocessorServiceRequest_descriptor, - new java.lang.String[] { "Region", "Call", }); - internal_static_hbase_pb_CoprocessorServiceResponse_descriptor = - getDescriptor().getMessageTypes().get(24); - internal_static_hbase_pb_CoprocessorServiceResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_CoprocessorServiceResponse_descriptor, - new java.lang.String[] { "Region", "Value", }); - internal_static_hbase_pb_Action_descriptor = - getDescriptor().getMessageTypes().get(25); - internal_static_hbase_pb_Action_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_Action_descriptor, - new java.lang.String[] { "Index", "Mutation", "Get", "ServiceCall", }); - internal_static_hbase_pb_RegionAction_descriptor = - getDescriptor().getMessageTypes().get(26); - internal_static_hbase_pb_RegionAction_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RegionAction_descriptor, - new java.lang.String[] { "Region", "Atomic", "Action", }); - internal_static_hbase_pb_RegionLoadStats_descriptor = - getDescriptor().getMessageTypes().get(27); - internal_static_hbase_pb_RegionLoadStats_fieldAccessorTable = new - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RegionLoadStats_descriptor, - new java.lang.String[] { "MemstoreLoad", "HeapOccupancy", "CompactionPressure", }); - internal_static_hbase_pb_MultiRegionLoadStats_descriptor = - getDescriptor().getMessageTypes().get(28); - internal_static_hbase_pb_MultiRegionLoadStats_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_MultiRegionLoadStats_descriptor, - new java.lang.String[] { "Region", "Stat", }); - internal_static_hbase_pb_ResultOrException_descriptor = - getDescriptor().getMessageTypes().get(29); - internal_static_hbase_pb_ResultOrException_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ResultOrException_descriptor, - new java.lang.String[] { "Index", "Result", "Exception", "ServiceResult", "LoadStats", }); - internal_static_hbase_pb_RegionActionResult_descriptor = - getDescriptor().getMessageTypes().get(30); - internal_static_hbase_pb_RegionActionResult_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RegionActionResult_descriptor, - new java.lang.String[] { "ResultOrException", "Exception", }); - internal_static_hbase_pb_MultiRequest_descriptor = - getDescriptor().getMessageTypes().get(31); - internal_static_hbase_pb_MultiRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_MultiRequest_descriptor, - new java.lang.String[] { "RegionAction", "NonceGroup", "Condition", }); - internal_static_hbase_pb_MultiResponse_descriptor = - getDescriptor().getMessageTypes().get(32); - internal_static_hbase_pb_MultiResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_MultiResponse_descriptor, - new java.lang.String[] { "RegionActionResult", "Processed", "RegionStatistics", }); - 
return null; - } - }; + new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { @@ -39719,6 +40435,227 @@ public final class ClientProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.getDescriptor(), org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.getDescriptor(), }, assigner); + internal_static_hbase_pb_Authorizations_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_hbase_pb_Authorizations_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_Authorizations_descriptor, + new java.lang.String[] { "Label", }); + internal_static_hbase_pb_CellVisibility_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_hbase_pb_CellVisibility_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_CellVisibility_descriptor, + new java.lang.String[] { "Expression", }); + internal_static_hbase_pb_Column_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_hbase_pb_Column_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_Column_descriptor, + new java.lang.String[] { "Family", "Qualifier", }); + internal_static_hbase_pb_Get_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_hbase_pb_Get_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_Get_descriptor, + new java.lang.String[] { "Row", "Column", "Attribute", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "StoreLimit", 
"StoreOffset", "ExistenceOnly", "Consistency", "CfTimeRange", }); + internal_static_hbase_pb_Result_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_hbase_pb_Result_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_Result_descriptor, + new java.lang.String[] { "Cell", "AssociatedCellCount", "Exists", "Stale", "Partial", }); + internal_static_hbase_pb_GetRequest_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_hbase_pb_GetRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetRequest_descriptor, + new java.lang.String[] { "Region", "Get", }); + internal_static_hbase_pb_GetResponse_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_hbase_pb_GetResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetResponse_descriptor, + new java.lang.String[] { "Result", }); + internal_static_hbase_pb_Condition_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_hbase_pb_Condition_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_Condition_descriptor, + new java.lang.String[] { "Row", "Family", "Qualifier", "CompareType", "Comparator", }); + internal_static_hbase_pb_MutationProto_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_hbase_pb_MutationProto_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_MutationProto_descriptor, + new java.lang.String[] { "Row", "MutateType", "ColumnValue", "Timestamp", "Attribute", "Durability", "TimeRange", "AssociatedCellCount", "Nonce", }); + internal_static_hbase_pb_MutationProto_ColumnValue_descriptor = + internal_static_hbase_pb_MutationProto_descriptor.getNestedTypes().get(0); + 
internal_static_hbase_pb_MutationProto_ColumnValue_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_MutationProto_ColumnValue_descriptor, + new java.lang.String[] { "Family", "QualifierValue", }); + internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_descriptor = + internal_static_hbase_pb_MutationProto_ColumnValue_descriptor.getNestedTypes().get(0); + internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_descriptor, + new java.lang.String[] { "Qualifier", "Value", "Timestamp", "DeleteType", "Tags", }); + internal_static_hbase_pb_MutateRequest_descriptor = + getDescriptor().getMessageTypes().get(9); + internal_static_hbase_pb_MutateRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_MutateRequest_descriptor, + new java.lang.String[] { "Region", "Mutation", "Condition", "NonceGroup", }); + internal_static_hbase_pb_MutateResponse_descriptor = + getDescriptor().getMessageTypes().get(10); + internal_static_hbase_pb_MutateResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_MutateResponse_descriptor, + new java.lang.String[] { "Result", "Processed", }); + internal_static_hbase_pb_Scan_descriptor = + getDescriptor().getMessageTypes().get(11); + internal_static_hbase_pb_Scan_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_Scan_descriptor, + new java.lang.String[] { "Column", "Attribute", "StartRow", "StopRow", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "BatchSize", "MaxResultSize", "StoreLimit", "StoreOffset", "LoadColumnFamiliesOnDemand", "Small", "Reversed", "Consistency", "Caching", "AllowPartialResults", "CfTimeRange", 
}); + internal_static_hbase_pb_ScanRequest_descriptor = + getDescriptor().getMessageTypes().get(12); + internal_static_hbase_pb_ScanRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ScanRequest_descriptor, + new java.lang.String[] { "Region", "Scan", "ScannerId", "NumberOfRows", "CloseScanner", "NextCallSeq", "ClientHandlesPartials", "ClientHandlesHeartbeats", "TrackScanMetrics", "Renew", }); + internal_static_hbase_pb_ScanResponse_descriptor = + getDescriptor().getMessageTypes().get(13); + internal_static_hbase_pb_ScanResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ScanResponse_descriptor, + new java.lang.String[] { "CellsPerResult", "ScannerId", "MoreResults", "Ttl", "Results", "Stale", "PartialFlagPerResult", "MoreResultsInRegion", "HeartbeatMessage", "ScanMetrics", }); + internal_static_hbase_pb_BulkLoadHFileRequest_descriptor = + getDescriptor().getMessageTypes().get(14); + internal_static_hbase_pb_BulkLoadHFileRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_BulkLoadHFileRequest_descriptor, + new java.lang.String[] { "Region", "FamilyPath", "AssignSeqNum", "FsToken", "BulkToken", "CopyFile", }); + internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_descriptor = + internal_static_hbase_pb_BulkLoadHFileRequest_descriptor.getNestedTypes().get(0); + internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_descriptor, + new java.lang.String[] { "Family", "Path", }); + internal_static_hbase_pb_BulkLoadHFileResponse_descriptor = + getDescriptor().getMessageTypes().get(15); + internal_static_hbase_pb_BulkLoadHFileResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( 
+ internal_static_hbase_pb_BulkLoadHFileResponse_descriptor, + new java.lang.String[] { "Loaded", }); + internal_static_hbase_pb_DelegationToken_descriptor = + getDescriptor().getMessageTypes().get(16); + internal_static_hbase_pb_DelegationToken_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_DelegationToken_descriptor, + new java.lang.String[] { "Identifier", "Password", "Kind", "Service", }); + internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor = + getDescriptor().getMessageTypes().get(17); + internal_static_hbase_pb_PrepareBulkLoadRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor, + new java.lang.String[] { "TableName", "Region", }); + internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor = + getDescriptor().getMessageTypes().get(18); + internal_static_hbase_pb_PrepareBulkLoadResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor, + new java.lang.String[] { "BulkToken", }); + internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor = + getDescriptor().getMessageTypes().get(19); + internal_static_hbase_pb_CleanupBulkLoadRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor, + new java.lang.String[] { "BulkToken", "Region", }); + internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor = + getDescriptor().getMessageTypes().get(20); + internal_static_hbase_pb_CleanupBulkLoadResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_CoprocessorServiceCall_descriptor = + getDescriptor().getMessageTypes().get(21); + 
internal_static_hbase_pb_CoprocessorServiceCall_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_CoprocessorServiceCall_descriptor, + new java.lang.String[] { "Row", "ServiceName", "MethodName", "Request", }); + internal_static_hbase_pb_CoprocessorServiceResult_descriptor = + getDescriptor().getMessageTypes().get(22); + internal_static_hbase_pb_CoprocessorServiceResult_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_CoprocessorServiceResult_descriptor, + new java.lang.String[] { "Value", }); + internal_static_hbase_pb_CoprocessorServiceRequest_descriptor = + getDescriptor().getMessageTypes().get(23); + internal_static_hbase_pb_CoprocessorServiceRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_CoprocessorServiceRequest_descriptor, + new java.lang.String[] { "Region", "Call", }); + internal_static_hbase_pb_CoprocessorServiceResponse_descriptor = + getDescriptor().getMessageTypes().get(24); + internal_static_hbase_pb_CoprocessorServiceResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_CoprocessorServiceResponse_descriptor, + new java.lang.String[] { "Region", "Value", }); + internal_static_hbase_pb_Action_descriptor = + getDescriptor().getMessageTypes().get(25); + internal_static_hbase_pb_Action_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_Action_descriptor, + new java.lang.String[] { "Index", "Mutation", "Get", "ServiceCall", }); + internal_static_hbase_pb_RegionAction_descriptor = + getDescriptor().getMessageTypes().get(26); + internal_static_hbase_pb_RegionAction_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_RegionAction_descriptor, + new java.lang.String[] { 
"Region", "Atomic", "Action", }); + internal_static_hbase_pb_RegionLoadStats_descriptor = + getDescriptor().getMessageTypes().get(27); + internal_static_hbase_pb_RegionLoadStats_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_RegionLoadStats_descriptor, + new java.lang.String[] { "MemstoreLoad", "HeapOccupancy", "CompactionPressure", }); + internal_static_hbase_pb_MultiRegionLoadStats_descriptor = + getDescriptor().getMessageTypes().get(28); + internal_static_hbase_pb_MultiRegionLoadStats_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_MultiRegionLoadStats_descriptor, + new java.lang.String[] { "Region", "Stat", }); + internal_static_hbase_pb_ResultOrException_descriptor = + getDescriptor().getMessageTypes().get(29); + internal_static_hbase_pb_ResultOrException_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ResultOrException_descriptor, + new java.lang.String[] { "Index", "Result", "Exception", "ServiceResult", "LoadStats", }); + internal_static_hbase_pb_RegionActionResult_descriptor = + getDescriptor().getMessageTypes().get(30); + internal_static_hbase_pb_RegionActionResult_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_RegionActionResult_descriptor, + new java.lang.String[] { "ResultOrException", "Exception", }); + internal_static_hbase_pb_MultiRequest_descriptor = + getDescriptor().getMessageTypes().get(31); + internal_static_hbase_pb_MultiRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_MultiRequest_descriptor, + new java.lang.String[] { "RegionAction", "NonceGroup", "Condition", }); + internal_static_hbase_pb_MultiResponse_descriptor = + getDescriptor().getMessageTypes().get(32); + 
internal_static_hbase_pb_MultiResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_MultiResponse_descriptor, + new java.lang.String[] { "RegionActionResult", "Processed", "RegionStatistics", }); + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(); + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.getDescriptor(); + org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.getDescriptor(); + org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.getDescriptor(); + org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterIdProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterIdProtos.java index 17f7dfb9..bd132af 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterIdProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterIdProtos.java @@ -6,77 +6,75 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated; public final class ClusterIdProtos { private ClusterIdProtos() {} public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); } - public interface ClusterIdOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ClusterIdOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ClusterId) + com.google.protobuf.MessageOrBuilder { - // required string cluster_id = 1; /** - * required string cluster_id = 1; - * *
      * This is the cluster id, a uuid as a String
      * 
+ * + * required string cluster_id = 1; */ boolean hasClusterId(); /** - * required string cluster_id = 1; - * *
      * This is the cluster id, a uuid as a String
      * 
+ * + * required string cluster_id = 1; */ java.lang.String getClusterId(); /** - * required string cluster_id = 1; - * *
      * This is the cluster id, a uuid as a String
      * 
+ * + * required string cluster_id = 1; */ com.google.protobuf.ByteString getClusterIdBytes(); } /** - * Protobuf type {@code hbase.pb.ClusterId} - * *
    **
    * Content of the '/hbase/hbaseid', cluster id, znode.
    * Also cluster of the ${HBASE_ROOTDIR}/hbase.id file.
    * 
+ * + * Protobuf type {@code hbase.pb.ClusterId} */ - public static final class ClusterId extends - com.google.protobuf.GeneratedMessage - implements ClusterIdOrBuilder { + public static final class ClusterId extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ClusterId) + ClusterIdOrBuilder { // Use ClusterId.newBuilder() to construct. - private ClusterId(com.google.protobuf.GeneratedMessage.Builder builder) { + private ClusterId(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ClusterId(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ClusterId defaultInstance; - public static ClusterId getDefaultInstance() { - return defaultInstance; } - - public ClusterId getDefaultInstanceForType() { - return defaultInstance; + private ClusterId() { + clusterId_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ClusterId( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -96,8 +94,9 @@ public final class ClusterIdProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - clusterId_ = input.readBytes(); + clusterId_ = bs; break; } } @@ -106,7 +105,7 @@ public final class ClusterIdProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - 
e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -117,48 +116,32 @@ public final class ClusterIdProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.internal_static_hbase_pb_ClusterId_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.internal_static_hbase_pb_ClusterId_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ClusterId parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ClusterId(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string cluster_id = 1; public static final int CLUSTER_ID_FIELD_NUMBER = 1; - private java.lang.Object clusterId_; + private volatile java.lang.Object clusterId_; /** - * required string cluster_id = 1; - * *
      * This is the cluster id, a uuid as a String
      * 
+ * + * required string cluster_id = 1; */ public boolean hasClusterId() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required string cluster_id = 1; - * *
      * This is the cluster id, a uuid as a String
      * 
+ * + * required string cluster_id = 1; */ public java.lang.String getClusterId() { java.lang.Object ref = clusterId_; @@ -175,11 +158,11 @@ public final class ClusterIdProtos { } } /** - * required string cluster_id = 1; - * *
      * This is the cluster id, a uuid as a String
      * 
+ * + * required string cluster_id = 1; */ public com.google.protobuf.ByteString getClusterIdBytes() { @@ -195,13 +178,11 @@ public final class ClusterIdProtos { } } - private void initFields() { - clusterId_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasClusterId()) { memoizedIsInitialized = 0; @@ -213,36 +194,27 @@ public final class ClusterIdProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getClusterIdBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, clusterId_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getClusterIdBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, clusterId_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -258,12 +230,10 @@ public final class ClusterIdProtos { result = result && getClusterId() .equals(other.getClusterId()); } - result = result && - 
getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -275,7 +245,7 @@ public final class ClusterIdProtos { hash = (37 * hash) + CLUSTER_ID_FIELD_NUMBER; hash = (53 * hash) + getClusterId().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -303,67 +273,79 @@ public final class ClusterIdProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.ClusterId} - * *
      **
      * Content of the '/hbase/hbaseid', cluster id, znode.
      * Also cluster of the ${HBASE_ROOTDIR}/hbase.id file.
      * 
+ * + * Protobuf type {@code hbase.pb.ClusterId} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ClusterId) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.internal_static_hbase_pb_ClusterId_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.internal_static_hbase_pb_ClusterId_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -376,18 +358,15 @@ public final class ClusterIdProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); clusterId_ = ""; @@ -395,10 +374,6 @@ public final class ClusterIdProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.internal_static_hbase_pb_ClusterId_descriptor; @@ -429,6 +404,32 @@ public final class ClusterIdProtos { return result; } + 
public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId)other); @@ -445,13 +446,13 @@ public final class ClusterIdProtos { clusterId_ = other.clusterId_; onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasClusterId()) { - return false; } return true; @@ -466,7 +467,7 @@ public final class ClusterIdProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -476,42 +477,44 @@ public final class ClusterIdProtos { } private int bitField0_; - // required string cluster_id = 1; private 
java.lang.Object clusterId_ = ""; /** - * required string cluster_id = 1; - * *
        * This is the cluster id, a uuid as a String
        * 
+ * + * required string cluster_id = 1; */ public boolean hasClusterId() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required string cluster_id = 1; - * *
        * This is the cluster id, a uuid as a String
        * 
+ * + * required string cluster_id = 1; */ public java.lang.String getClusterId() { java.lang.Object ref = clusterId_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - clusterId_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + clusterId_ = s; + } return s; } else { return (java.lang.String) ref; } } /** - * required string cluster_id = 1; - * *
        * This is the cluster id, a uuid as a String
        * 
+ * + * required string cluster_id = 1; */ public com.google.protobuf.ByteString getClusterIdBytes() { @@ -527,11 +530,11 @@ public final class ClusterIdProtos { } } /** - * required string cluster_id = 1; - * *
        * This is the cluster id, a uuid as a String
        * 
+ * + * required string cluster_id = 1; */ public Builder setClusterId( java.lang.String value) { @@ -544,11 +547,11 @@ public final class ClusterIdProtos { return this; } /** - * required string cluster_id = 1; - * *
        * This is the cluster id, a uuid as a String
        * 
+ * + * required string cluster_id = 1; */ public Builder clearClusterId() { bitField0_ = (bitField0_ & ~0x00000001); @@ -557,11 +560,11 @@ public final class ClusterIdProtos { return this; } /** - * required string cluster_id = 1; - * *
        * This is the cluster id, a uuid as a String
        * 
+ * + * required string cluster_id = 1; */ public Builder setClusterIdBytes( com.google.protobuf.ByteString value) { @@ -573,29 +576,66 @@ public final class ClusterIdProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ClusterId) } + // @@protoc_insertion_point(class_scope:hbase.pb.ClusterId) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId DEFAULT_INSTANCE; static { - defaultInstance = new ClusterId(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ClusterId parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ClusterId(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ClusterId) } - private static com.google.protobuf.Descriptors.Descriptor + private static final 
com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ClusterId_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ClusterId_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { @@ -605,23 +645,23 @@ public final class ClusterIdProtos { "erIdProtosH\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_hbase_pb_ClusterId_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_hbase_pb_ClusterId_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ClusterId_descriptor, - new java.lang.String[] { "ClusterId", }); - return null; - } - }; + new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); + internal_static_hbase_pb_ClusterId_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_hbase_pb_ClusterId_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ClusterId_descriptor, + new java.lang.String[] { "ClusterId", }); } // @@protoc_insertion_point(outer_class_scope) diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterStatusProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterStatusProtos.java index c9e34d9..f945184 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterStatusProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterStatusProtos.java @@ -6,12 +6,18 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated; public final class ClusterStatusProtos { private ClusterStatusProtos() {} public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); } - public interface RegionStateOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RegionStateOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RegionState) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionInfo region_info = 1; /** * required .hbase.pb.RegionInfo 
region_info = 1; */ @@ -25,7 +31,6 @@ public final class ClusterStatusProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(); - // required .hbase.pb.RegionState.State state = 2; /** * required .hbase.pb.RegionState.State state = 2; */ @@ -35,7 +40,6 @@ public final class ClusterStatusProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State getState(); - // optional uint64 stamp = 3; /** * optional uint64 stamp = 3; */ @@ -48,36 +52,29 @@ public final class ClusterStatusProtos { /** * Protobuf type {@code hbase.pb.RegionState} */ - public static final class RegionState extends - com.google.protobuf.GeneratedMessage - implements RegionStateOrBuilder { + public static final class RegionState extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RegionState) + RegionStateOrBuilder { // Use RegionState.newBuilder() to construct. 
- private RegionState(com.google.protobuf.GeneratedMessage.Builder builder) { + private RegionState(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private RegionState(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RegionState defaultInstance; - public static RegionState getDefaultInstance() { - return defaultInstance; } - - public RegionState getDefaultInstanceForType() { - return defaultInstance; + private RegionState() { + state_ = 0; + stamp_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RegionState( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -116,7 +113,7 @@ public final class ClusterStatusProtos { unknownFields.mergeVarintField(2, rawValue); } else { bitField0_ |= 0x00000002; - state_ = value; + state_ = rawValue; } break; } @@ -131,7 +128,7 @@ public final class ClusterStatusProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -142,282 +139,277 @@ public final class ClusterStatusProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionState_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionState_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RegionState parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RegionState(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - /** * Protobuf enum {@code hbase.pb.RegionState.State} */ public enum State implements com.google.protobuf.ProtocolMessageEnum { /** - * OFFLINE = 0; - * *
        * region is in an offline state
        * 
+ * + * OFFLINE = 0; */ - OFFLINE(0, 0), + OFFLINE(0), /** - * PENDING_OPEN = 1; - * *
        * sent rpc to server to open but has not begun
        * 
+ * + * PENDING_OPEN = 1; */ - PENDING_OPEN(1, 1), + PENDING_OPEN(1), /** - * OPENING = 2; - * *
        * server has begun to open but not yet done
        * 
+ * + * OPENING = 2; */ - OPENING(2, 2), + OPENING(2), /** - * OPEN = 3; - * *
        * server opened region and updated meta
        * 
+ * + * OPEN = 3; */ - OPEN(3, 3), + OPEN(3), /** - * PENDING_CLOSE = 4; - * *
        * sent rpc to server to close but has not begun
        * 
+ * + * PENDING_CLOSE = 4; */ - PENDING_CLOSE(4, 4), + PENDING_CLOSE(4), /** - * CLOSING = 5; - * *
        * server has begun to close but not yet done
        * 
+ * + * CLOSING = 5; */ - CLOSING(5, 5), + CLOSING(5), /** - * CLOSED = 6; - * *
        * server closed region and updated meta
        * 
+ * + * CLOSED = 6; */ - CLOSED(6, 6), + CLOSED(6), /** - * SPLITTING = 7; - * *
        * server started split of a region
        * 
+ * + * SPLITTING = 7; */ - SPLITTING(7, 7), + SPLITTING(7), /** - * SPLIT = 8; - * *
        * server completed split of a region
        * 
+ * + * SPLIT = 8; */ - SPLIT(8, 8), + SPLIT(8), /** - * FAILED_OPEN = 9; - * *
        * failed to open, and won't retry any more
        * 
+ * + * FAILED_OPEN = 9; */ - FAILED_OPEN(9, 9), + FAILED_OPEN(9), /** - * FAILED_CLOSE = 10; - * *
        * failed to close, and won't retry any more
        * 
+ * + * FAILED_CLOSE = 10; */ - FAILED_CLOSE(10, 10), + FAILED_CLOSE(10), /** - * MERGING = 11; - * *
        * server started merge a region
        * 
+ * + * MERGING = 11; */ - MERGING(11, 11), + MERGING(11), /** - * MERGED = 12; - * *
        * server completed merge of a region
        * 
+ * + * MERGED = 12; */ - MERGED(12, 12), + MERGED(12), /** - * SPLITTING_NEW = 13; - * *
        * new region to be created when RS splits a parent
        * 
+ * + * SPLITTING_NEW = 13; */ - SPLITTING_NEW(13, 13), + SPLITTING_NEW(13), /** - * MERGING_NEW = 14; - * *
        * region but hasn't be created yet, or master doesn't
        * know it's already created
        * 
+ * + * MERGING_NEW = 14; */ - MERGING_NEW(14, 14), + MERGING_NEW(14), ; /** - * OFFLINE = 0; - * *
        * region is in an offline state
        * 
+ * + * OFFLINE = 0; */ public static final int OFFLINE_VALUE = 0; /** - * PENDING_OPEN = 1; - * *
        * sent rpc to server to open but has not begun
        * 
+ * + * PENDING_OPEN = 1; */ public static final int PENDING_OPEN_VALUE = 1; /** - * OPENING = 2; - * *
        * server has begun to open but not yet done
        * 
+ * + * OPENING = 2; */ public static final int OPENING_VALUE = 2; /** - * OPEN = 3; - * *
        * server opened region and updated meta
        * 
+ * + * OPEN = 3; */ public static final int OPEN_VALUE = 3; /** - * PENDING_CLOSE = 4; - * *
        * sent rpc to server to close but has not begun
        * 
+ * + * PENDING_CLOSE = 4; */ public static final int PENDING_CLOSE_VALUE = 4; /** - * CLOSING = 5; - * *
        * server has begun to close but not yet done
        * 
+ * + * CLOSING = 5; */ public static final int CLOSING_VALUE = 5; /** - * CLOSED = 6; - * *
        * server closed region and updated meta
        * 
+ * + * CLOSED = 6; */ public static final int CLOSED_VALUE = 6; /** - * SPLITTING = 7; - * *
        * server started split of a region
        * 
+ * + * SPLITTING = 7; */ public static final int SPLITTING_VALUE = 7; /** - * SPLIT = 8; - * *
        * server completed split of a region
        * 
+ * + * SPLIT = 8; */ public static final int SPLIT_VALUE = 8; /** - * FAILED_OPEN = 9; - * *
        * failed to open, and won't retry any more
        * 
+ * + * FAILED_OPEN = 9; */ public static final int FAILED_OPEN_VALUE = 9; /** - * FAILED_CLOSE = 10; - * *
        * failed to close, and won't retry any more
        * 
+ * + * FAILED_CLOSE = 10; */ public static final int FAILED_CLOSE_VALUE = 10; /** - * MERGING = 11; - * *
        * server started merge a region
        * 
+ * + * MERGING = 11; */ public static final int MERGING_VALUE = 11; /** - * MERGED = 12; - * *
        * server completed merge of a region
        * 
+ * + * MERGED = 12; */ public static final int MERGED_VALUE = 12; /** - * SPLITTING_NEW = 13; - * *
        * new region to be created when RS splits a parent
        * 
+ * + * SPLITTING_NEW = 13; */ public static final int SPLITTING_NEW_VALUE = 13; /** - * MERGING_NEW = 14; - * *
        * region but hasn't be created yet, or master doesn't
        * know it's already created
        * 
+ * + * MERGING_NEW = 14; */ public static final int MERGING_NEW_VALUE = 14; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static State valueOf(int value) { + return forNumber(value); + } + + public static State forNumber(int value) { switch (value) { case 0: return OFFLINE; case 1: return PENDING_OPEN; @@ -442,17 +434,17 @@ public final class ClusterStatusProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + State> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public State findValueByNumber(int number) { - return State.valueOf(number); + return State.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -474,11 +466,9 @@ public final class ClusterStatusProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private State(int index, int value) { - this.index = index; + private State(int value) { this.value = value; } @@ -486,7 +476,6 @@ public final class ClusterStatusProtos { } private int bitField0_; - // required .hbase.pb.RegionInfo region_info = 1; public static final int REGION_INFO_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo regionInfo_; /** @@ -499,18 +488,17 @@ public final class ClusterStatusProtos { * required .hbase.pb.RegionInfo region_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { - return regionInfo_; + return regionInfo_ == null 
? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : regionInfo_; } /** * required .hbase.pb.RegionInfo region_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() { - return regionInfo_; + return regionInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : regionInfo_; } - // required .hbase.pb.RegionState.State state = 2; public static final int STATE_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State state_; + private int state_; /** * required .hbase.pb.RegionState.State state = 2; */ @@ -521,10 +509,10 @@ public final class ClusterStatusProtos { * required .hbase.pb.RegionState.State state = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State getState() { - return state_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State result = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.valueOf(state_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE : result; } - // optional uint64 stamp = 3; public static final int STAMP_FIELD_NUMBER = 3; private long stamp_; /** @@ -540,15 +528,11 @@ public final class ClusterStatusProtos { return stamp_; } - private void initFields() { - regionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE; - stamp_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRegionInfo()) { memoizedIsInitialized = 0; @@ -568,50 +552,42 @@ public final class ClusterStatusProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, regionInfo_); + output.writeMessage(1, getRegionInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeEnum(2, state_.getNumber()); + output.writeEnum(2, state_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeUInt64(3, stamp_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, regionInfo_); + .computeMessageSize(1, getRegionInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(2, state_.getNumber()); + .computeEnumSize(2, state_); } if 
(((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(3, stamp_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -629,20 +605,17 @@ public final class ClusterStatusProtos { } result = result && (hasState() == other.hasState()); if (hasState()) { - result = result && - (getState() == other.getState()); + result = result && state_ == other.state_; } result = result && (hasStamp() == other.hasStamp()); if (hasStamp()) { result = result && (getStamp() == other.getStamp()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -656,13 +629,14 @@ public final class ClusterStatusProtos { } if (hasState()) { hash = (37 * hash) + STATE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getState()); + hash = (53 * hash) + state_; } if (hasStamp()) { hash = (37 * hash) + STAMP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getStamp()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getStamp()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -690,46 +664,57 @@ public final class ClusterStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState parseFrom(java.io.InputStream input) throws java.io.IOException { - return 
PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } 
public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -737,14 +722,15 @@ public final class ClusterStatusProtos { * Protobuf type {@code hbase.pb.RegionState} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RegionState) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionState_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionState_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -757,38 +743,31 @@ public final class ClusterStatusProtos { } private Builder( - 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionInfoFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (regionInfoBuilder_ == null) { - regionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + regionInfo_ = null; } else { regionInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); - state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE; + state_ = 0; bitField0_ = (bitField0_ & ~0x00000002); stamp_ = 0L; bitField0_ = (bitField0_ & ~0x00000004); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionState_descriptor; @@ -831,6 +810,32 @@ public final class ClusterStatusProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) 
super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState)other); @@ -851,21 +856,19 @@ public final class ClusterStatusProtos { if (other.hasStamp()) { setStamp(other.getStamp()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasRegionInfo()) { - return false; } if (!hasState()) { - return false; } if (!getRegionInfo().isInitialized()) { - return false; } return true; @@ -880,7 +883,7 @@ public final class ClusterStatusProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -890,9 +893,8 @@ public final class ClusterStatusProtos { } private int bitField0_; - // required .hbase.pb.RegionInfo region_info = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_; /** * required .hbase.pb.RegionInfo region_info = 1; @@ -905,7 +907,7 @@ public final class ClusterStatusProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { if (regionInfoBuilder_ == null) { - return regionInfo_; + return regionInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : regionInfo_; } else { return regionInfoBuilder_.getMessage(); } @@ -946,6 +948,7 @@ public final class ClusterStatusProtos { public Builder mergeRegionInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo value) { if (regionInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + regionInfo_ != null && regionInfo_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) { regionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(regionInfo_).mergeFrom(value).buildPartial(); @@ -964,7 +967,7 @@ public final class ClusterStatusProtos { */ public Builder clearRegionInfo() { if (regionInfoBuilder_ == null) { - regionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + regionInfo_ = null; onChanged(); } else { regionInfoBuilder_.clear(); @@ -987,19 +990,20 @@ public final class ClusterStatusProtos { if (regionInfoBuilder_ != null) { return regionInfoBuilder_.getMessageOrBuilder(); } else { - return regionInfo_; + return regionInfo_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : regionInfo_; } } /** * required .hbase.pb.RegionInfo region_info = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> getRegionInfoFieldBuilder() { if (regionInfoBuilder_ == null) { - regionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( - regionInfo_, + getRegionInfo(), getParentForChildren(), isClean()); regionInfo_ = null; @@ -1007,8 +1011,7 @@ public final class ClusterStatusProtos { return regionInfoBuilder_; } - // required .hbase.pb.RegionState.State state = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE; + private int state_ = 0; /** * required .hbase.pb.RegionState.State state = 2; */ @@ -1019,7 +1022,8 @@ public final class ClusterStatusProtos { * required .hbase.pb.RegionState.State state = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State getState() { - return state_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State result = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.valueOf(state_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE : result; } /** * required .hbase.pb.RegionState.State state = 2; @@ -1029,7 +1033,7 @@ public final class ClusterStatusProtos { throw new NullPointerException(); } bitField0_ |= 0x00000002; - state_ = value; + state_ = value.getNumber(); onChanged(); return this; } @@ -1038,12 +1042,11 @@ public final class ClusterStatusProtos { */ public Builder clearState() { bitField0_ = (bitField0_ & ~0x00000002); - state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE; + state_ = 0; onChanged(); return this; } - // optional uint64 stamp = 3; private long stamp_ ; /** * optional uint64 stamp = 3; @@ -1075,22 +1078,59 @@ public final class ClusterStatusProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RegionState) } + // @@protoc_insertion_point(class_scope:hbase.pb.RegionState) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState DEFAULT_INSTANCE; static { - defaultInstance = new RegionState(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RegionState parsePartialFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionState(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.RegionState) } - public interface RegionInTransitionOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RegionInTransitionOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RegionInTransition) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionSpecifier spec = 1; /** * required .hbase.pb.RegionSpecifier spec = 1; */ @@ -1104,7 +1144,6 @@ public final class ClusterStatusProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getSpecOrBuilder(); - // required .hbase.pb.RegionState region_state = 2; /** * required .hbase.pb.RegionState region_state = 2; */ @@ -1121,36 +1160,27 @@ public final class ClusterStatusProtos { /** * Protobuf type {@code hbase.pb.RegionInTransition} */ - public static final class RegionInTransition extends - com.google.protobuf.GeneratedMessage - implements RegionInTransitionOrBuilder { + public static final class RegionInTransition extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RegionInTransition) + RegionInTransitionOrBuilder { // Use RegionInTransition.newBuilder() to construct. 
- private RegionInTransition(com.google.protobuf.GeneratedMessage.Builder builder) { + private RegionInTransition(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private RegionInTransition(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RegionInTransition defaultInstance; - public static RegionInTransition getDefaultInstance() { - return defaultInstance; } - - public RegionInTransition getDefaultInstanceForType() { - return defaultInstance; + private RegionInTransition() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RegionInTransition( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -1201,7 +1231,7 @@ public final class ClusterStatusProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -1212,30 +1242,14 @@ public final class ClusterStatusProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionInTransition_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionInTransition_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RegionInTransition parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RegionInTransition(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.RegionSpecifier spec = 1; public static final int SPEC_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier spec_; /** @@ -1248,16 +1262,15 @@ public final class ClusterStatusProtos { * required .hbase.pb.RegionSpecifier spec = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getSpec() { - return spec_; + return spec_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : spec_; } /** * required .hbase.pb.RegionSpecifier spec = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getSpecOrBuilder() { - return spec_; + return spec_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : spec_; } - // required .hbase.pb.RegionState region_state = 2; public static final int REGION_STATE_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState regionState_; /** @@ -1270,23 +1283,20 @@ public final class ClusterStatusProtos { * required .hbase.pb.RegionState region_state = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState getRegionState() { - return regionState_; + return regionState_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance() : regionState_; } /** * required .hbase.pb.RegionState region_state = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder getRegionStateOrBuilder() { - return regionState_; + return regionState_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance() : regionState_; } - private void initFields() { - spec_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - regionState_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasSpec()) { memoizedIsInitialized = 0; @@ -1310,43 +1320,35 @@ public final class ClusterStatusProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, spec_); + output.writeMessage(1, getSpec()); } if (((bitField0_ & 0x00000002) == 
0x00000002)) { - output.writeMessage(2, regionState_); + output.writeMessage(2, getRegionState()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, spec_); + .computeMessageSize(1, getSpec()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, regionState_); + .computeMessageSize(2, getRegionState()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -1367,12 +1369,10 @@ public final class ClusterStatusProtos { result = result && getRegionState() .equals(other.getRegionState()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -1388,7 +1388,7 @@ public final class ClusterStatusProtos { hash = (37 * hash) + REGION_STATE_FIELD_NUMBER; hash = (53 * hash) + getRegionState().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -1416,46 +1416,57 @@ public final class ClusterStatusProtos { } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return 
PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -1463,14 +1474,15 @@ public final class ClusterStatusProtos { * Protobuf type {@code hbase.pb.RegionInTransition} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RegionInTransition) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionInTransition_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionInTransition_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -1483,30 +1495,27 @@ public final class ClusterStatusProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getSpecFieldBuilder(); getRegionStateFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (specBuilder_ == null) { - spec_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + spec_ = null; } else { specBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (regionStateBuilder_ == null) { - regionState_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance(); + regionState_ = null; } else { regionStateBuilder_.clear(); } @@ -1514,10 +1523,6 @@ public final class ClusterStatusProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionInTransition_descriptor; @@ -1560,6 +1565,32 @@ public final class ClusterStatusProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return 
(Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition)other); @@ -1577,25 +1608,22 @@ public final class ClusterStatusProtos { if (other.hasRegionState()) { mergeRegionState(other.getRegionState()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasSpec()) { - return false; } if (!hasRegionState()) { - return false; } if (!getSpec().isInitialized()) { - return false; } if (!getRegionState().isInitialized()) { - return false; } return true; @@ -1610,7 +1638,7 @@ public final class ClusterStatusProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -1620,9 +1648,8 @@ public final class ClusterStatusProtos { } private int bitField0_; - // required .hbase.pb.RegionSpecifier spec = 1; - private 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier spec_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier spec_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> specBuilder_; /** * required .hbase.pb.RegionSpecifier spec = 1; @@ -1635,7 +1662,7 @@ public final class ClusterStatusProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getSpec() { if (specBuilder_ == null) { - return spec_; + return spec_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : spec_; } else { return specBuilder_.getMessage(); } @@ -1676,6 +1703,7 @@ public final class ClusterStatusProtos { public Builder mergeSpec(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (specBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + spec_ != null && spec_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { spec_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(spec_).mergeFrom(value).buildPartial(); @@ -1694,7 +1722,7 @@ public final class ClusterStatusProtos { */ public Builder clearSpec() { if (specBuilder_ == null) { - spec_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + spec_ = null; onChanged(); } else { specBuilder_.clear(); @@ -1717,19 +1745,20 @@ public final class ClusterStatusProtos { if (specBuilder_ != null) { 
return specBuilder_.getMessageOrBuilder(); } else { - return spec_; + return spec_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : spec_; } } /** * required .hbase.pb.RegionSpecifier spec = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getSpecFieldBuilder() { if (specBuilder_ == null) { - specBuilder_ = new com.google.protobuf.SingleFieldBuilder< + specBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - spec_, + getSpec(), getParentForChildren(), isClean()); spec_ = null; @@ -1737,9 +1766,8 @@ public final class ClusterStatusProtos { return specBuilder_; } - // required .hbase.pb.RegionState region_state = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState regionState_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState regionState_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder> regionStateBuilder_; /** * required 
.hbase.pb.RegionState region_state = 2; @@ -1752,7 +1780,7 @@ public final class ClusterStatusProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState getRegionState() { if (regionStateBuilder_ == null) { - return regionState_; + return regionState_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance() : regionState_; } else { return regionStateBuilder_.getMessage(); } @@ -1793,6 +1821,7 @@ public final class ClusterStatusProtos { public Builder mergeRegionState(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState value) { if (regionStateBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + regionState_ != null && regionState_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance()) { regionState_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.newBuilder(regionState_).mergeFrom(value).buildPartial(); @@ -1811,7 +1840,7 @@ public final class ClusterStatusProtos { */ public Builder clearRegionState() { if (regionStateBuilder_ == null) { - regionState_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance(); + regionState_ = null; onChanged(); } else { regionStateBuilder_.clear(); @@ -1834,41 +1863,79 @@ public final class ClusterStatusProtos { if (regionStateBuilder_ != null) { return regionStateBuilder_.getMessageOrBuilder(); } else { - return regionState_; + return regionState_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance() : regionState_; } } /** * required .hbase.pb.RegionState region_state = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder> getRegionStateFieldBuilder() { if (regionStateBuilder_ == null) { - regionStateBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionStateBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder>( - regionState_, + getRegionState(), getParentForChildren(), isClean()); regionState_ = null; } return regionStateBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RegionInTransition) } + // @@protoc_insertion_point(class_scope:hbase.pb.RegionInTransition) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition DEFAULT_INSTANCE; static { - defaultInstance = new RegionInTransition(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition(); + } + + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RegionInTransition parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionInTransition(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.RegionInTransition) } - public interface StoreSequenceIdOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface StoreSequenceIdOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.StoreSequenceId) + com.google.protobuf.MessageOrBuilder { - // required bytes family_name = 1; /** * required bytes family_name = 1; */ @@ -1878,7 +1945,6 @@ public final class ClusterStatusProtos { */ com.google.protobuf.ByteString getFamilyName(); - // required uint64 sequence_id = 2; /** * required uint64 sequence_id = 2; */ @@ -1889,43 +1955,36 @@ public final class ClusterStatusProtos { long getSequenceId(); } /** - * Protobuf type {@code hbase.pb.StoreSequenceId} - * *
    **
    * sequence Id of a store
    * 
+ * + * Protobuf type {@code hbase.pb.StoreSequenceId} */ - public static final class StoreSequenceId extends - com.google.protobuf.GeneratedMessage - implements StoreSequenceIdOrBuilder { + public static final class StoreSequenceId extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.StoreSequenceId) + StoreSequenceIdOrBuilder { // Use StoreSequenceId.newBuilder() to construct. - private StoreSequenceId(com.google.protobuf.GeneratedMessage.Builder builder) { + private StoreSequenceId(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private StoreSequenceId(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final StoreSequenceId defaultInstance; - public static StoreSequenceId getDefaultInstance() { - return defaultInstance; } - - public StoreSequenceId getDefaultInstanceForType() { - return defaultInstance; + private StoreSequenceId() { + familyName_ = com.google.protobuf.ByteString.EMPTY; + sequenceId_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private StoreSequenceId( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -1960,7 +2019,7 @@ public final class ClusterStatusProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { 
this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -1971,30 +2030,14 @@ public final class ClusterStatusProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_StoreSequenceId_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_StoreSequenceId_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public StoreSequenceId parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new StoreSequenceId(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bytes family_name = 1; public static final int FAMILY_NAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString familyName_; /** @@ -2010,7 +2053,6 @@ public final class ClusterStatusProtos { return familyName_; } - // required uint64 sequence_id = 2; public static final int SEQUENCE_ID_FIELD_NUMBER = 2; private long sequenceId_; /** @@ -2026,14 +2068,11 @@ public final class ClusterStatusProtos { return sequenceId_; } - private void initFields() { - familyName_ = com.google.protobuf.ByteString.EMPTY; - sequenceId_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if 
(isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasFamilyName()) { memoizedIsInitialized = 0; @@ -2049,19 +2088,17 @@ public final class ClusterStatusProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, familyName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt64(2, sequenceId_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -2073,19 +2110,13 @@ public final class ClusterStatusProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(2, sequenceId_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -2106,12 +2137,10 @@ public final class ClusterStatusProtos { result = result && (getSequenceId() == other.getSequenceId()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -2125,9 +2154,10 @@ public final class ClusterStatusProtos { } if (hasSequenceId()) { hash = (37 * hash) + SEQUENCE_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getSequenceId()); + hash = (53 * 
hash) + com.google.protobuf.Internal.hashLong( + getSequenceId()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -2155,66 +2185,78 @@ public final class ClusterStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); 
} public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.StoreSequenceId} - * *
      **
      * sequence Id of a store
      * 
+ * + * Protobuf type {@code hbase.pb.StoreSequenceId} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceIdOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.StoreSequenceId) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceIdOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_StoreSequenceId_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_StoreSequenceId_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -2227,18 +2269,15 @@ public final class ClusterStatusProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); familyName_ = com.google.protobuf.ByteString.EMPTY; @@ -2248,10 +2287,6 @@ public final class ClusterStatusProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_StoreSequenceId_descriptor; @@ -2286,6 +2321,32 @@ public final class ClusterStatusProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId)other); @@ -2303,17 +2364,16 @@ public final class ClusterStatusProtos { if (other.hasSequenceId()) { setSequenceId(other.getSequenceId()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasFamilyName()) { - return false; } if (!hasSequenceId()) { - return false; } return true; @@ -2328,7 +2388,7 @@ public final class ClusterStatusProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = 
(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -2338,7 +2398,6 @@ public final class ClusterStatusProtos { } private int bitField0_; - // required bytes family_name = 1; private com.google.protobuf.ByteString familyName_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes family_name = 1; @@ -2374,7 +2433,6 @@ public final class ClusterStatusProtos { return this; } - // required uint64 sequence_id = 2; private long sequenceId_ ; /** * required uint64 sequence_id = 2; @@ -2406,22 +2464,59 @@ public final class ClusterStatusProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.StoreSequenceId) } + // @@protoc_insertion_point(class_scope:hbase.pb.StoreSequenceId) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId DEFAULT_INSTANCE; static { - defaultInstance = new StoreSequenceId(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public StoreSequenceId parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return new StoreSequenceId(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.StoreSequenceId) } - public interface RegionStoreSequenceIdsOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RegionStoreSequenceIdsOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RegionStoreSequenceIds) + com.google.protobuf.MessageOrBuilder { - // required uint64 last_flushed_sequence_id = 1; /** * required uint64 last_flushed_sequence_id = 1; */ @@ -2431,7 +2526,6 @@ public final class ClusterStatusProtos { */ long getLastFlushedSequenceId(); - // repeated .hbase.pb.StoreSequenceId store_sequence_id = 2; /** * repeated .hbase.pb.StoreSequenceId store_sequence_id = 2; */ @@ -2457,44 +2551,37 @@ public final class ClusterStatusProtos { int index); } /** - * Protobuf type {@code hbase.pb.RegionStoreSequenceIds} - * *
    **
    * contains a sequence id of a region which should be the minimum of its store sequence ids and
    * list of sequence ids of the region's stores
    * 
+ * + * Protobuf type {@code hbase.pb.RegionStoreSequenceIds} */ - public static final class RegionStoreSequenceIds extends - com.google.protobuf.GeneratedMessage - implements RegionStoreSequenceIdsOrBuilder { + public static final class RegionStoreSequenceIds extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RegionStoreSequenceIds) + RegionStoreSequenceIdsOrBuilder { // Use RegionStoreSequenceIds.newBuilder() to construct. - private RegionStoreSequenceIds(com.google.protobuf.GeneratedMessage.Builder builder) { + private RegionStoreSequenceIds(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private RegionStoreSequenceIds(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RegionStoreSequenceIds defaultInstance; - public static RegionStoreSequenceIds getDefaultInstance() { - return defaultInstance; } - - public RegionStoreSequenceIds getDefaultInstanceForType() { - return defaultInstance; + private RegionStoreSequenceIds() { + lastFlushedSequenceId_ = 0L; + storeSequenceId_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RegionStoreSequenceIds( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -2523,7 +2610,8 @@ public final class ClusterStatusProtos { storeSequenceId_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } - 
storeSequenceId_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.PARSER, extensionRegistry)); + storeSequenceId_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.PARSER, extensionRegistry)); break; } } @@ -2532,7 +2620,7 @@ public final class ClusterStatusProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { storeSequenceId_ = java.util.Collections.unmodifiableList(storeSequenceId_); @@ -2546,30 +2634,14 @@ public final class ClusterStatusProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionStoreSequenceIds_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionStoreSequenceIds_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RegionStoreSequenceIds parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RegionStoreSequenceIds(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - 
} - private int bitField0_; - // required uint64 last_flushed_sequence_id = 1; public static final int LAST_FLUSHED_SEQUENCE_ID_FIELD_NUMBER = 1; private long lastFlushedSequenceId_; /** @@ -2585,7 +2657,6 @@ public final class ClusterStatusProtos { return lastFlushedSequenceId_; } - // repeated .hbase.pb.StoreSequenceId store_sequence_id = 2; public static final int STORE_SEQUENCE_ID_FIELD_NUMBER = 2; private java.util.List storeSequenceId_; /** @@ -2621,14 +2692,11 @@ public final class ClusterStatusProtos { return storeSequenceId_.get(index); } - private void initFields() { - lastFlushedSequenceId_ = 0L; - storeSequenceId_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasLastFlushedSequenceId()) { memoizedIsInitialized = 0; @@ -2646,19 +2714,17 @@ public final class ClusterStatusProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, lastFlushedSequenceId_); } for (int i = 0; i < storeSequenceId_.size(); i++) { output.writeMessage(2, storeSequenceId_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -2670,19 +2736,13 @@ public final class ClusterStatusProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, storeSequenceId_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long 
serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -2700,12 +2760,10 @@ public final class ClusterStatusProtos { } result = result && getStoreSequenceIdList() .equals(other.getStoreSequenceIdList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -2715,13 +2773,14 @@ public final class ClusterStatusProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasLastFlushedSequenceId()) { hash = (37 * hash) + LAST_FLUSHED_SEQUENCE_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLastFlushedSequenceId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getLastFlushedSequenceId()); } if (getStoreSequenceIdCount() > 0) { hash = (37 * hash) + STORE_SEQUENCE_ID_FIELD_NUMBER; hash = (53 * hash) + getStoreSequenceIdList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -2749,67 +2808,79 @@ public final class ClusterStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, 
extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds prototype) { - return newBuilder().mergeFrom(prototype); + 
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.RegionStoreSequenceIds} - * *
      **
      * contains a sequence id of a region which should be the minimum of its store sequence ids and
      * list of sequence ids of the region's stores
      * 
+ * + * Protobuf type {@code hbase.pb.RegionStoreSequenceIds} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIdsOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RegionStoreSequenceIds) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIdsOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionStoreSequenceIds_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionStoreSequenceIds_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -2822,19 +2893,16 @@ public final class ClusterStatusProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getStoreSequenceIdFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); lastFlushedSequenceId_ = 0L; @@ -2848,10 +2916,6 @@ public final class ClusterStatusProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionStoreSequenceIds_descriptor; @@ -2891,6 +2955,32 @@ public final class ClusterStatusProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds)other); @@ -2924,25 +3014,24 @@ public final class ClusterStatusProtos { storeSequenceId_ = other.storeSequenceId_; bitField0_ = (bitField0_ & ~0x00000002); storeSequenceIdBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getStoreSequenceIdFieldBuilder() : null; } else { storeSequenceIdBuilder_.addAllMessages(other.storeSequenceId_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasLastFlushedSequenceId()) { - return false; } for (int i = 0; i < getStoreSequenceIdCount(); i++) { if (!getStoreSequenceId(i).isInitialized()) { - return false; } } @@ -2958,7 +3047,7 @@ public final class ClusterStatusProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -2968,7 +3057,6 @@ public final class ClusterStatusProtos { } private int bitField0_; - // required uint64 last_flushed_sequence_id = 1; private long lastFlushedSequenceId_ ; /** * required uint64 last_flushed_sequence_id = 1; @@ -3001,7 +3089,6 @@ public final class ClusterStatusProtos { return this; } - // repeated .hbase.pb.StoreSequenceId store_sequence_id = 2; private java.util.List storeSequenceId_ = java.util.Collections.emptyList(); private void ensureStoreSequenceIdIsMutable() { @@ -3011,7 +3098,7 @@ public final class ClusterStatusProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceIdOrBuilder> storeSequenceIdBuilder_; /** @@ -3143,7 +3230,8 @@ public final class ClusterStatusProtos { java.lang.Iterable values) { if (storeSequenceIdBuilder_ 
== null) { ensureStoreSequenceIdIsMutable(); - super.addAll(values, storeSequenceId_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, storeSequenceId_); onChanged(); } else { storeSequenceIdBuilder_.addAllMessages(values); @@ -3226,11 +3314,11 @@ public final class ClusterStatusProtos { getStoreSequenceIdBuilderList() { return getStoreSequenceIdFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceIdOrBuilder> getStoreSequenceIdFieldBuilder() { if (storeSequenceIdBuilder_ == null) { - storeSequenceIdBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + storeSequenceIdBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceIdOrBuilder>( storeSequenceId_, ((bitField0_ & 0x00000002) == 0x00000002), @@ -3240,326 +3328,347 @@ public final class ClusterStatusProtos { } return storeSequenceIdBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RegionStoreSequenceIds) } + // @@protoc_insertion_point(class_scope:hbase.pb.RegionStoreSequenceIds) + private static final 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds DEFAULT_INSTANCE; static { - defaultInstance = new RegionStoreSequenceIds(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RegionStoreSequenceIds parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionStoreSequenceIds(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.RegionStoreSequenceIds) } - public interface RegionLoadOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RegionLoadOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RegionLoad) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionSpecifier region_specifier = 1; /** - * required .hbase.pb.RegionSpecifier region_specifier = 1; - * *
      ** the region specifier 
      * 
+ * + * required .hbase.pb.RegionSpecifier region_specifier = 1; */ boolean hasRegionSpecifier(); /** - * required .hbase.pb.RegionSpecifier region_specifier = 1; - * *
      ** the region specifier 
      * 
+ * + * required .hbase.pb.RegionSpecifier region_specifier = 1; */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegionSpecifier(); /** - * required .hbase.pb.RegionSpecifier region_specifier = 1; - * *
      ** the region specifier 
      * 
+ * + * required .hbase.pb.RegionSpecifier region_specifier = 1; */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionSpecifierOrBuilder(); - // optional uint32 stores = 2; /** - * optional uint32 stores = 2; - * *
      ** the number of stores for the region 
      * 
+ * + * optional uint32 stores = 2; */ boolean hasStores(); /** - * optional uint32 stores = 2; - * *
      ** the number of stores for the region 
      * 
+ * + * optional uint32 stores = 2; */ int getStores(); - // optional uint32 storefiles = 3; /** - * optional uint32 storefiles = 3; - * *
      ** the number of storefiles for the region 
      * 
+ * + * optional uint32 storefiles = 3; */ boolean hasStorefiles(); /** - * optional uint32 storefiles = 3; - * *
      ** the number of storefiles for the region 
      * 
+ * + * optional uint32 storefiles = 3; */ int getStorefiles(); - // optional uint32 store_uncompressed_size_MB = 4; /** - * optional uint32 store_uncompressed_size_MB = 4; - * *
      ** the total size of the store files for the region, uncompressed, in MB 
      * 
+ * + * optional uint32 store_uncompressed_size_MB = 4; */ boolean hasStoreUncompressedSizeMB(); /** - * optional uint32 store_uncompressed_size_MB = 4; - * *
      ** the total size of the store files for the region, uncompressed, in MB 
      * 
+ * + * optional uint32 store_uncompressed_size_MB = 4; */ int getStoreUncompressedSizeMB(); - // optional uint32 storefile_size_MB = 5; /** - * optional uint32 storefile_size_MB = 5; - * *
      ** the current total size of the store files for the region, in MB 
      * 
+ * + * optional uint32 storefile_size_MB = 5; */ boolean hasStorefileSizeMB(); /** - * optional uint32 storefile_size_MB = 5; - * *
      ** the current total size of the store files for the region, in MB 
      * 
+ * + * optional uint32 storefile_size_MB = 5; */ int getStorefileSizeMB(); - // optional uint32 memstore_size_MB = 6; /** - * optional uint32 memstore_size_MB = 6; - * *
      ** the current size of the memstore for the region, in MB 
      * 
+ * + * optional uint32 memstore_size_MB = 6; */ boolean hasMemstoreSizeMB(); /** - * optional uint32 memstore_size_MB = 6; - * *
      ** the current size of the memstore for the region, in MB 
      * 
+ * + * optional uint32 memstore_size_MB = 6; */ int getMemstoreSizeMB(); - // optional uint32 storefile_index_size_MB = 7; /** - * optional uint32 storefile_index_size_MB = 7; - * *
      **
      * The current total size of root-level store file indexes for the region,
      * in MB. The same as {@link #rootIndexSizeKB} but in MB.
      * 
+ * + * optional uint32 storefile_index_size_MB = 7; */ boolean hasStorefileIndexSizeMB(); /** - * optional uint32 storefile_index_size_MB = 7; - * *
      **
      * The current total size of root-level store file indexes for the region,
      * in MB. The same as {@link #rootIndexSizeKB} but in MB.
      * 
+ * + * optional uint32 storefile_index_size_MB = 7; */ int getStorefileIndexSizeMB(); - // optional uint64 read_requests_count = 8; /** - * optional uint64 read_requests_count = 8; - * *
      ** the current total read requests made to region 
      * 
+ * + * optional uint64 read_requests_count = 8; */ boolean hasReadRequestsCount(); /** - * optional uint64 read_requests_count = 8; - * *
      ** the current total read requests made to region 
      * 
+ * + * optional uint64 read_requests_count = 8; */ long getReadRequestsCount(); - // optional uint64 write_requests_count = 9; /** - * optional uint64 write_requests_count = 9; - * *
      ** the current total write requests made to region 
      * 
+ * + * optional uint64 write_requests_count = 9; */ boolean hasWriteRequestsCount(); /** - * optional uint64 write_requests_count = 9; - * *
      ** the current total write requests made to region 
      * 
+ * + * optional uint64 write_requests_count = 9; */ long getWriteRequestsCount(); - // optional uint64 total_compacting_KVs = 10; /** - * optional uint64 total_compacting_KVs = 10; - * *
      ** the total compacting key values in currently running compaction 
      * 
+ * + * optional uint64 total_compacting_KVs = 10; */ boolean hasTotalCompactingKVs(); /** - * optional uint64 total_compacting_KVs = 10; - * *
      ** the total compacting key values in currently running compaction 
      * 
+ * + * optional uint64 total_compacting_KVs = 10; */ long getTotalCompactingKVs(); - // optional uint64 current_compacted_KVs = 11; /** - * optional uint64 current_compacted_KVs = 11; - * *
      ** the completed count of key values in currently running compaction 
      * 
+ * + * optional uint64 current_compacted_KVs = 11; */ boolean hasCurrentCompactedKVs(); /** - * optional uint64 current_compacted_KVs = 11; - * *
      ** the completed count of key values in currently running compaction 
      * 
+ * + * optional uint64 current_compacted_KVs = 11; */ long getCurrentCompactedKVs(); - // optional uint32 root_index_size_KB = 12; /** - * optional uint32 root_index_size_KB = 12; - * *
      ** The current total size of root-level indexes for the region, in KB. 
      * 
+ * + * optional uint32 root_index_size_KB = 12; */ boolean hasRootIndexSizeKB(); /** - * optional uint32 root_index_size_KB = 12; - * *
      ** The current total size of root-level indexes for the region, in KB. 
      * 
+ * + * optional uint32 root_index_size_KB = 12; */ int getRootIndexSizeKB(); - // optional uint32 total_static_index_size_KB = 13; /** - * optional uint32 total_static_index_size_KB = 13; - * *
      ** The total size of all index blocks, not just the root level, in KB. 
      * 
+ * + * optional uint32 total_static_index_size_KB = 13; */ boolean hasTotalStaticIndexSizeKB(); /** - * optional uint32 total_static_index_size_KB = 13; - * *
      ** The total size of all index blocks, not just the root level, in KB. 
      * 
+ * + * optional uint32 total_static_index_size_KB = 13; */ int getTotalStaticIndexSizeKB(); - // optional uint32 total_static_bloom_size_KB = 14; /** - * optional uint32 total_static_bloom_size_KB = 14; - * *
      **
      * The total size of all Bloom filter blocks, not just loaded into the
      * block cache, in KB.
      * 
+ * + * optional uint32 total_static_bloom_size_KB = 14; */ boolean hasTotalStaticBloomSizeKB(); /** - * optional uint32 total_static_bloom_size_KB = 14; - * *
      **
      * The total size of all Bloom filter blocks, not just loaded into the
      * block cache, in KB.
      * 
+ * + * optional uint32 total_static_bloom_size_KB = 14; */ int getTotalStaticBloomSizeKB(); - // optional uint64 complete_sequence_id = 15; /** - * optional uint64 complete_sequence_id = 15; - * *
      ** the most recent sequence Id from cache flush 
      * 
+ * + * optional uint64 complete_sequence_id = 15; */ boolean hasCompleteSequenceId(); /** - * optional uint64 complete_sequence_id = 15; - * *
      ** the most recent sequence Id from cache flush 
      * 
+ * + * optional uint64 complete_sequence_id = 15; */ long getCompleteSequenceId(); - // optional float data_locality = 16; /** - * optional float data_locality = 16; - * *
      ** The current data locality for region in the regionserver 
      * 
+ * + * optional float data_locality = 16; */ boolean hasDataLocality(); /** - * optional float data_locality = 16; - * *
      ** The current data locality for region in the regionserver 
      * 
+ * + * optional float data_locality = 16; */ float getDataLocality(); - // optional uint64 last_major_compaction_ts = 17 [default = 0]; /** * optional uint64 last_major_compaction_ts = 17 [default = 0]; */ @@ -3569,102 +3678,109 @@ public final class ClusterStatusProtos { */ long getLastMajorCompactionTs(); - // repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; /** - * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; - * *
      ** the most recent sequence Id of store from cache flush 
      * 
+ * + * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; */ java.util.List getStoreCompleteSequenceIdList(); /** - * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; - * *
      ** the most recent sequence Id of store from cache flush 
      * 
+ * + * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId getStoreCompleteSequenceId(int index); /** - * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; - * *
      ** the most recent sequence Id of store from cache flush 
      * 
+ * + * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; */ int getStoreCompleteSequenceIdCount(); /** - * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; - * *
      ** the most recent sequence Id of store from cache flush 
      * 
+ * + * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; */ java.util.List getStoreCompleteSequenceIdOrBuilderList(); /** - * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; - * *
      ** the most recent sequence Id of store from cache flush 
      * 
+ * + * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceIdOrBuilder getStoreCompleteSequenceIdOrBuilder( int index); - // optional uint64 filtered_read_requests_count = 19; /** - * optional uint64 filtered_read_requests_count = 19; - * *
      ** the current total filtered read requests made to region 
      * 
+ * + * optional uint64 filtered_read_requests_count = 19; */ boolean hasFilteredReadRequestsCount(); /** - * optional uint64 filtered_read_requests_count = 19; - * *
      ** the current total filtered read requests made to region 
      * 
+ * + * optional uint64 filtered_read_requests_count = 19; */ long getFilteredReadRequestsCount(); } /** * Protobuf type {@code hbase.pb.RegionLoad} */ - public static final class RegionLoad extends - com.google.protobuf.GeneratedMessage - implements RegionLoadOrBuilder { + public static final class RegionLoad extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RegionLoad) + RegionLoadOrBuilder { // Use RegionLoad.newBuilder() to construct. - private RegionLoad(com.google.protobuf.GeneratedMessage.Builder builder) { + private RegionLoad(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private RegionLoad(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RegionLoad defaultInstance; - public static RegionLoad getDefaultInstance() { - return defaultInstance; - } - - public RegionLoad getDefaultInstanceForType() { - return defaultInstance; + private RegionLoad() { + stores_ = 0; + storefiles_ = 0; + storeUncompressedSizeMB_ = 0; + storefileSizeMB_ = 0; + memstoreSizeMB_ = 0; + storefileIndexSizeMB_ = 0; + readRequestsCount_ = 0L; + writeRequestsCount_ = 0L; + totalCompactingKVs_ = 0L; + currentCompactedKVs_ = 0L; + rootIndexSizeKB_ = 0; + totalStaticIndexSizeKB_ = 0; + totalStaticBloomSizeKB_ = 0; + completeSequenceId_ = 0L; + dataLocality_ = 0F; + lastMajorCompactionTs_ = 0L; + storeCompleteSequenceId_ = java.util.Collections.emptyList(); + filteredReadRequestsCount_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RegionLoad( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -3781,7 +3897,8 @@ public final class ClusterStatusProtos { storeCompleteSequenceId_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00020000; } - storeCompleteSequenceId_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.PARSER, extensionRegistry)); + storeCompleteSequenceId_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.PARSER, extensionRegistry)); break; } case 152: { @@ -3795,7 +3912,7 @@ public final class ClusterStatusProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00020000) == 0x00020000)) { storeCompleteSequenceId_ = java.util.Collections.unmodifiableList(storeCompleteSequenceId_); @@ -3809,432 +3926,400 @@ public final class ClusterStatusProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionLoad_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionLoad_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RegionLoad 
parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RegionLoad(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.RegionSpecifier region_specifier = 1; public static final int REGION_SPECIFIER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier regionSpecifier_; /** - * required .hbase.pb.RegionSpecifier region_specifier = 1; - * *
      ** the region specifier 
      * 
+ * + * required .hbase.pb.RegionSpecifier region_specifier = 1; */ public boolean hasRegionSpecifier() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required .hbase.pb.RegionSpecifier region_specifier = 1; - * *
      ** the region specifier 
      * 
+ * + * required .hbase.pb.RegionSpecifier region_specifier = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegionSpecifier() { - return regionSpecifier_; + return regionSpecifier_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : regionSpecifier_; } /** - * required .hbase.pb.RegionSpecifier region_specifier = 1; - * *
      ** the region specifier 
      * 
+ * + * required .hbase.pb.RegionSpecifier region_specifier = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionSpecifierOrBuilder() { - return regionSpecifier_; + return regionSpecifier_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : regionSpecifier_; } - // optional uint32 stores = 2; public static final int STORES_FIELD_NUMBER = 2; private int stores_; /** - * optional uint32 stores = 2; - * *
      ** the number of stores for the region 
      * 
+ * + * optional uint32 stores = 2; */ public boolean hasStores() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional uint32 stores = 2; - * *
      ** the number of stores for the region 
      * 
+ * + * optional uint32 stores = 2; */ public int getStores() { return stores_; } - // optional uint32 storefiles = 3; public static final int STOREFILES_FIELD_NUMBER = 3; private int storefiles_; /** - * optional uint32 storefiles = 3; - * *
      ** the number of storefiles for the region 
      * 
+ * + * optional uint32 storefiles = 3; */ public boolean hasStorefiles() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * optional uint32 storefiles = 3; - * *
      ** the number of storefiles for the region 
      * 
+ * + * optional uint32 storefiles = 3; */ public int getStorefiles() { return storefiles_; } - // optional uint32 store_uncompressed_size_MB = 4; public static final int STORE_UNCOMPRESSED_SIZE_MB_FIELD_NUMBER = 4; private int storeUncompressedSizeMB_; /** - * optional uint32 store_uncompressed_size_MB = 4; - * *
      ** the total size of the store files for the region, uncompressed, in MB 
      * 
+ * + * optional uint32 store_uncompressed_size_MB = 4; */ public boolean hasStoreUncompressedSizeMB() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** - * optional uint32 store_uncompressed_size_MB = 4; - * *
      ** the total size of the store files for the region, uncompressed, in MB 
      * 
+ * + * optional uint32 store_uncompressed_size_MB = 4; */ public int getStoreUncompressedSizeMB() { return storeUncompressedSizeMB_; } - // optional uint32 storefile_size_MB = 5; public static final int STOREFILE_SIZE_MB_FIELD_NUMBER = 5; private int storefileSizeMB_; /** - * optional uint32 storefile_size_MB = 5; - * *
      ** the current total size of the store files for the region, in MB 
      * 
+ * + * optional uint32 storefile_size_MB = 5; */ public boolean hasStorefileSizeMB() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** - * optional uint32 storefile_size_MB = 5; - * *
      ** the current total size of the store files for the region, in MB 
      * 
+ * + * optional uint32 storefile_size_MB = 5; */ public int getStorefileSizeMB() { return storefileSizeMB_; } - // optional uint32 memstore_size_MB = 6; public static final int MEMSTORE_SIZE_MB_FIELD_NUMBER = 6; private int memstoreSizeMB_; /** - * optional uint32 memstore_size_MB = 6; - * *
      ** the current size of the memstore for the region, in MB 
      * 
+ * + * optional uint32 memstore_size_MB = 6; */ public boolean hasMemstoreSizeMB() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** - * optional uint32 memstore_size_MB = 6; - * *
      ** the current size of the memstore for the region, in MB 
      * 
+ * + * optional uint32 memstore_size_MB = 6; */ public int getMemstoreSizeMB() { return memstoreSizeMB_; } - // optional uint32 storefile_index_size_MB = 7; public static final int STOREFILE_INDEX_SIZE_MB_FIELD_NUMBER = 7; private int storefileIndexSizeMB_; /** - * optional uint32 storefile_index_size_MB = 7; - * *
      **
      * The current total size of root-level store file indexes for the region,
      * in MB. The same as {@link #rootIndexSizeKB} but in MB.
      * 
+ * + * optional uint32 storefile_index_size_MB = 7; */ public boolean hasStorefileIndexSizeMB() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** - * optional uint32 storefile_index_size_MB = 7; - * *
      **
      * The current total size of root-level store file indexes for the region,
      * in MB. The same as {@link #rootIndexSizeKB} but in MB.
      * 
+ * + * optional uint32 storefile_index_size_MB = 7; */ public int getStorefileIndexSizeMB() { return storefileIndexSizeMB_; } - // optional uint64 read_requests_count = 8; public static final int READ_REQUESTS_COUNT_FIELD_NUMBER = 8; private long readRequestsCount_; /** - * optional uint64 read_requests_count = 8; - * *
      ** the current total read requests made to region 
      * 
+ * + * optional uint64 read_requests_count = 8; */ public boolean hasReadRequestsCount() { return ((bitField0_ & 0x00000080) == 0x00000080); } /** - * optional uint64 read_requests_count = 8; - * *
      ** the current total read requests made to region 
      * 
+ * + * optional uint64 read_requests_count = 8; */ public long getReadRequestsCount() { return readRequestsCount_; } - // optional uint64 write_requests_count = 9; public static final int WRITE_REQUESTS_COUNT_FIELD_NUMBER = 9; private long writeRequestsCount_; /** - * optional uint64 write_requests_count = 9; - * *
      ** the current total write requests made to region 
      * 
+ * + * optional uint64 write_requests_count = 9; */ public boolean hasWriteRequestsCount() { return ((bitField0_ & 0x00000100) == 0x00000100); } /** - * optional uint64 write_requests_count = 9; - * *
      ** the current total write requests made to region 
      * 
+ * + * optional uint64 write_requests_count = 9; */ public long getWriteRequestsCount() { return writeRequestsCount_; } - // optional uint64 total_compacting_KVs = 10; public static final int TOTAL_COMPACTING_KVS_FIELD_NUMBER = 10; private long totalCompactingKVs_; /** - * optional uint64 total_compacting_KVs = 10; - * *
      ** the total compacting key values in currently running compaction 
      * 
+ * + * optional uint64 total_compacting_KVs = 10; */ public boolean hasTotalCompactingKVs() { return ((bitField0_ & 0x00000200) == 0x00000200); } /** - * optional uint64 total_compacting_KVs = 10; - * *
      ** the total compacting key values in currently running compaction 
      * 
+ * + * optional uint64 total_compacting_KVs = 10; */ public long getTotalCompactingKVs() { return totalCompactingKVs_; } - // optional uint64 current_compacted_KVs = 11; public static final int CURRENT_COMPACTED_KVS_FIELD_NUMBER = 11; private long currentCompactedKVs_; /** - * optional uint64 current_compacted_KVs = 11; - * *
      ** the completed count of key values in currently running compaction 
      * 
+ * + * optional uint64 current_compacted_KVs = 11; */ public boolean hasCurrentCompactedKVs() { return ((bitField0_ & 0x00000400) == 0x00000400); } /** - * optional uint64 current_compacted_KVs = 11; - * *
      ** the completed count of key values in currently running compaction 
      * 
+ * + * optional uint64 current_compacted_KVs = 11; */ public long getCurrentCompactedKVs() { return currentCompactedKVs_; } - // optional uint32 root_index_size_KB = 12; public static final int ROOT_INDEX_SIZE_KB_FIELD_NUMBER = 12; private int rootIndexSizeKB_; /** - * optional uint32 root_index_size_KB = 12; - * *
      ** The current total size of root-level indexes for the region, in KB. 
      * 
+ * + * optional uint32 root_index_size_KB = 12; */ public boolean hasRootIndexSizeKB() { return ((bitField0_ & 0x00000800) == 0x00000800); } /** - * optional uint32 root_index_size_KB = 12; - * *
      ** The current total size of root-level indexes for the region, in KB. 
      * 
+ * + * optional uint32 root_index_size_KB = 12; */ public int getRootIndexSizeKB() { return rootIndexSizeKB_; } - // optional uint32 total_static_index_size_KB = 13; public static final int TOTAL_STATIC_INDEX_SIZE_KB_FIELD_NUMBER = 13; private int totalStaticIndexSizeKB_; /** - * optional uint32 total_static_index_size_KB = 13; - * *
      ** The total size of all index blocks, not just the root level, in KB. 
      * 
+ * + * optional uint32 total_static_index_size_KB = 13; */ public boolean hasTotalStaticIndexSizeKB() { return ((bitField0_ & 0x00001000) == 0x00001000); } /** - * optional uint32 total_static_index_size_KB = 13; - * *
      ** The total size of all index blocks, not just the root level, in KB. 
      * 
+ * + * optional uint32 total_static_index_size_KB = 13; */ public int getTotalStaticIndexSizeKB() { return totalStaticIndexSizeKB_; } - // optional uint32 total_static_bloom_size_KB = 14; public static final int TOTAL_STATIC_BLOOM_SIZE_KB_FIELD_NUMBER = 14; private int totalStaticBloomSizeKB_; /** - * optional uint32 total_static_bloom_size_KB = 14; - * *
      **
      * The total size of all Bloom filter blocks, not just loaded into the
      * block cache, in KB.
      * 
+ * + * optional uint32 total_static_bloom_size_KB = 14; */ public boolean hasTotalStaticBloomSizeKB() { return ((bitField0_ & 0x00002000) == 0x00002000); } /** - * optional uint32 total_static_bloom_size_KB = 14; - * *
      **
      * The total size of all Bloom filter blocks, not just loaded into the
      * block cache, in KB.
      * 
+ * + * optional uint32 total_static_bloom_size_KB = 14; */ public int getTotalStaticBloomSizeKB() { return totalStaticBloomSizeKB_; } - // optional uint64 complete_sequence_id = 15; public static final int COMPLETE_SEQUENCE_ID_FIELD_NUMBER = 15; private long completeSequenceId_; /** - * optional uint64 complete_sequence_id = 15; - * *
      ** the most recent sequence Id from cache flush 
      * 
+ * + * optional uint64 complete_sequence_id = 15; */ public boolean hasCompleteSequenceId() { return ((bitField0_ & 0x00004000) == 0x00004000); } /** - * optional uint64 complete_sequence_id = 15; - * *
      ** the most recent sequence Id from cache flush 
      * 
+ * + * optional uint64 complete_sequence_id = 15; */ public long getCompleteSequenceId() { return completeSequenceId_; } - // optional float data_locality = 16; public static final int DATA_LOCALITY_FIELD_NUMBER = 16; private float dataLocality_; /** - * optional float data_locality = 16; - * *
      ** The current data locality for region in the regionserver 
      * 
+ * + * optional float data_locality = 16; */ public boolean hasDataLocality() { return ((bitField0_ & 0x00008000) == 0x00008000); } /** - * optional float data_locality = 16; - * *
      ** The current data locality for region in the regionserver 
      * 
+ * + * optional float data_locality = 16; */ public float getDataLocality() { return dataLocality_; } - // optional uint64 last_major_compaction_ts = 17 [default = 0]; public static final int LAST_MAJOR_COMPACTION_TS_FIELD_NUMBER = 17; private long lastMajorCompactionTs_; /** @@ -4250,111 +4335,89 @@ public final class ClusterStatusProtos { return lastMajorCompactionTs_; } - // repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; public static final int STORE_COMPLETE_SEQUENCE_ID_FIELD_NUMBER = 18; private java.util.List storeCompleteSequenceId_; /** - * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; - * *
      ** the most recent sequence Id of store from cache flush 
      * 
+ * + * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; */ public java.util.List getStoreCompleteSequenceIdList() { return storeCompleteSequenceId_; } /** - * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; - * *
      ** the most recent sequence Id of store from cache flush 
      * 
+ * + * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; */ public java.util.List getStoreCompleteSequenceIdOrBuilderList() { return storeCompleteSequenceId_; } /** - * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; - * *
      ** the most recent sequence Id of store from cache flush 
      * 
+ * + * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; */ public int getStoreCompleteSequenceIdCount() { return storeCompleteSequenceId_.size(); } /** - * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; - * *
      ** the most recent sequence Id of store from cache flush 
      * 
+ * + * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId getStoreCompleteSequenceId(int index) { return storeCompleteSequenceId_.get(index); } /** - * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; - * *
      ** the most recent sequence Id of store from cache flush 
      * 
+ * + * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceIdOrBuilder getStoreCompleteSequenceIdOrBuilder( int index) { return storeCompleteSequenceId_.get(index); } - // optional uint64 filtered_read_requests_count = 19; public static final int FILTERED_READ_REQUESTS_COUNT_FIELD_NUMBER = 19; private long filteredReadRequestsCount_; /** - * optional uint64 filtered_read_requests_count = 19; - * *
      ** the current total filtered read requests made to region 
      * 
+ * + * optional uint64 filtered_read_requests_count = 19; */ public boolean hasFilteredReadRequestsCount() { return ((bitField0_ & 0x00020000) == 0x00020000); } /** - * optional uint64 filtered_read_requests_count = 19; - * *
      ** the current total filtered read requests made to region 
      * 
+ * + * optional uint64 filtered_read_requests_count = 19; */ public long getFilteredReadRequestsCount() { return filteredReadRequestsCount_; } - private void initFields() { - regionSpecifier_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - stores_ = 0; - storefiles_ = 0; - storeUncompressedSizeMB_ = 0; - storefileSizeMB_ = 0; - memstoreSizeMB_ = 0; - storefileIndexSizeMB_ = 0; - readRequestsCount_ = 0L; - writeRequestsCount_ = 0L; - totalCompactingKVs_ = 0L; - currentCompactedKVs_ = 0L; - rootIndexSizeKB_ = 0; - totalStaticIndexSizeKB_ = 0; - totalStaticBloomSizeKB_ = 0; - completeSequenceId_ = 0L; - dataLocality_ = 0F; - lastMajorCompactionTs_ = 0L; - storeCompleteSequenceId_ = java.util.Collections.emptyList(); - filteredReadRequestsCount_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRegionSpecifier()) { memoizedIsInitialized = 0; @@ -4376,9 +4439,8 @@ public final class ClusterStatusProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, regionSpecifier_); + output.writeMessage(1, getRegionSpecifier()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt32(2, stores_); @@ -4434,18 +4496,17 @@ public final class ClusterStatusProtos { if (((bitField0_ & 0x00020000) == 0x00020000)) { output.writeUInt64(19, filteredReadRequestsCount_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 
0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, regionSpecifier_); + .computeMessageSize(1, getRegionSpecifier()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream @@ -4519,19 +4580,13 @@ public final class ClusterStatusProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(19, filteredReadRequestsCount_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -4619,7 +4674,10 @@ public final class ClusterStatusProtos { } result = result && (hasDataLocality() == other.hasDataLocality()); if (hasDataLocality()) { - result = result && (Float.floatToIntBits(getDataLocality()) == Float.floatToIntBits(other.getDataLocality())); + result = result && ( + java.lang.Float.floatToIntBits(getDataLocality()) + == java.lang.Float.floatToIntBits( + other.getDataLocality())); } result = result && (hasLastMajorCompactionTs() == other.hasLastMajorCompactionTs()); if (hasLastMajorCompactionTs()) { @@ -4633,12 +4691,10 @@ public final class ClusterStatusProtos { result = result && (getFilteredReadRequestsCount() == other.getFilteredReadRequestsCount()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -4676,19 +4732,23 @@ public final class ClusterStatusProtos { } if (hasReadRequestsCount()) { hash = (37 * hash) + READ_REQUESTS_COUNT_FIELD_NUMBER; - hash = 
(53 * hash) + hashLong(getReadRequestsCount()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getReadRequestsCount()); } if (hasWriteRequestsCount()) { hash = (37 * hash) + WRITE_REQUESTS_COUNT_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getWriteRequestsCount()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getWriteRequestsCount()); } if (hasTotalCompactingKVs()) { hash = (37 * hash) + TOTAL_COMPACTING_KVS_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getTotalCompactingKVs()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getTotalCompactingKVs()); } if (hasCurrentCompactedKVs()) { hash = (37 * hash) + CURRENT_COMPACTED_KVS_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getCurrentCompactedKVs()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getCurrentCompactedKVs()); } if (hasRootIndexSizeKB()) { hash = (37 * hash) + ROOT_INDEX_SIZE_KB_FIELD_NUMBER; @@ -4704,16 +4764,18 @@ public final class ClusterStatusProtos { } if (hasCompleteSequenceId()) { hash = (37 * hash) + COMPLETE_SEQUENCE_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getCompleteSequenceId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getCompleteSequenceId()); } if (hasDataLocality()) { hash = (37 * hash) + DATA_LOCALITY_FIELD_NUMBER; - hash = (53 * hash) + Float.floatToIntBits( + hash = (53 * hash) + java.lang.Float.floatToIntBits( getDataLocality()); } if (hasLastMajorCompactionTs()) { hash = (37 * hash) + LAST_MAJOR_COMPACTION_TS_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLastMajorCompactionTs()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getLastMajorCompactionTs()); } if (getStoreCompleteSequenceIdCount() > 0) { hash = (37 * hash) + STORE_COMPLETE_SEQUENCE_ID_FIELD_NUMBER; @@ -4721,9 +4783,10 @@ public final class ClusterStatusProtos { } if (hasFilteredReadRequestsCount()) { hash = (37 * hash) + FILTERED_READ_REQUESTS_COUNT_FIELD_NUMBER; - hash = (53 * hash) + 
hashLong(getFilteredReadRequestsCount()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getFilteredReadRequestsCount()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -4751,46 +4814,57 @@ public final class ClusterStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -4798,14 +4872,15 @@ public final class ClusterStatusProtos { * Protobuf type {@code hbase.pb.RegionLoad} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoadOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RegionLoad) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoadOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionLoad_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionLoad_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -4818,24 +4893,21 @@ public final class ClusterStatusProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionSpecifierFieldBuilder(); getStoreCompleteSequenceIdFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (regionSpecifierBuilder_ == null) { - regionSpecifier_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + regionSpecifier_ = null; } else { regionSpecifierBuilder_.clear(); } @@ -4883,10 +4955,6 @@ public final class ClusterStatusProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionLoad_descriptor; @@ -4998,6 +5066,32 @@ public final class ClusterStatusProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public 
Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad)other); @@ -5079,7 +5173,7 @@ public final class ClusterStatusProtos { storeCompleteSequenceId_ = other.storeCompleteSequenceId_; bitField0_ = (bitField0_ & ~0x00020000); storeCompleteSequenceIdBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getStoreCompleteSequenceIdFieldBuilder() : null; } else { storeCompleteSequenceIdBuilder_.addAllMessages(other.storeCompleteSequenceId_); @@ -5089,22 +5183,20 @@ public final class ClusterStatusProtos { if (other.hasFilteredReadRequestsCount()) { setFilteredReadRequestsCount(other.getFilteredReadRequestsCount()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasRegionSpecifier()) { - return false; } if (!getRegionSpecifier().isInitialized()) { - return false; } for (int i = 0; i < getStoreCompleteSequenceIdCount(); i++) { if (!getStoreCompleteSequenceId(i).isInitialized()) { - return false; } } @@ -5120,7 +5212,7 @@ public final class ClusterStatusProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -5130,40 +5222,39 @@ public final class ClusterStatusProtos { } private int bitField0_; - // required .hbase.pb.RegionSpecifier region_specifier = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier regionSpecifier_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier regionSpecifier_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
regionSpecifierBuilder_; /** - * required .hbase.pb.RegionSpecifier region_specifier = 1; - * *
        ** the region specifier 
        * 
+ * + * required .hbase.pb.RegionSpecifier region_specifier = 1; */ public boolean hasRegionSpecifier() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required .hbase.pb.RegionSpecifier region_specifier = 1; - * *
        ** the region specifier 
        * 
+ * + * required .hbase.pb.RegionSpecifier region_specifier = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegionSpecifier() { if (regionSpecifierBuilder_ == null) { - return regionSpecifier_; + return regionSpecifier_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : regionSpecifier_; } else { return regionSpecifierBuilder_.getMessage(); } } /** - * required .hbase.pb.RegionSpecifier region_specifier = 1; - * *
        ** the region specifier 
        * 
+ * + * required .hbase.pb.RegionSpecifier region_specifier = 1; */ public Builder setRegionSpecifier(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionSpecifierBuilder_ == null) { @@ -5179,11 +5270,11 @@ public final class ClusterStatusProtos { return this; } /** - * required .hbase.pb.RegionSpecifier region_specifier = 1; - * *
        ** the region specifier 
        * 
+ * + * required .hbase.pb.RegionSpecifier region_specifier = 1; */ public Builder setRegionSpecifier( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { @@ -5197,15 +5288,16 @@ public final class ClusterStatusProtos { return this; } /** - * required .hbase.pb.RegionSpecifier region_specifier = 1; - * *
        ** the region specifier 
        * 
+ * + * required .hbase.pb.RegionSpecifier region_specifier = 1; */ public Builder mergeRegionSpecifier(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionSpecifierBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + regionSpecifier_ != null && regionSpecifier_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { regionSpecifier_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(regionSpecifier_).mergeFrom(value).buildPartial(); @@ -5220,15 +5312,15 @@ public final class ClusterStatusProtos { return this; } /** - * required .hbase.pb.RegionSpecifier region_specifier = 1; - * *
        ** the region specifier 
        * 
+ * + * required .hbase.pb.RegionSpecifier region_specifier = 1; */ public Builder clearRegionSpecifier() { if (regionSpecifierBuilder_ == null) { - regionSpecifier_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + regionSpecifier_ = null; onChanged(); } else { regionSpecifierBuilder_.clear(); @@ -5237,11 +5329,11 @@ public final class ClusterStatusProtos { return this; } /** - * required .hbase.pb.RegionSpecifier region_specifier = 1; - * *
        ** the region specifier 
        * 
+ * + * required .hbase.pb.RegionSpecifier region_specifier = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionSpecifierBuilder() { bitField0_ |= 0x00000001; @@ -5249,33 +5341,34 @@ public final class ClusterStatusProtos { return getRegionSpecifierFieldBuilder().getBuilder(); } /** - * required .hbase.pb.RegionSpecifier region_specifier = 1; - * *
        ** the region specifier 
        * 
+ * + * required .hbase.pb.RegionSpecifier region_specifier = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionSpecifierOrBuilder() { if (regionSpecifierBuilder_ != null) { return regionSpecifierBuilder_.getMessageOrBuilder(); } else { - return regionSpecifier_; + return regionSpecifier_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : regionSpecifier_; } } /** - * required .hbase.pb.RegionSpecifier region_specifier = 1; - * *
        ** the region specifier 
        * 
+ * + * required .hbase.pb.RegionSpecifier region_specifier = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionSpecifierFieldBuilder() { if (regionSpecifierBuilder_ == null) { - regionSpecifierBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionSpecifierBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - regionSpecifier_, + getRegionSpecifier(), getParentForChildren(), isClean()); regionSpecifier_ = null; @@ -5283,34 +5376,33 @@ public final class ClusterStatusProtos { return regionSpecifierBuilder_; } - // optional uint32 stores = 2; private int stores_ ; /** - * optional uint32 stores = 2; - * *
        ** the number of stores for the region 
        * 
+ * + * optional uint32 stores = 2; */ public boolean hasStores() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional uint32 stores = 2; - * *
        ** the number of stores for the region 
        * 
+ * + * optional uint32 stores = 2; */ public int getStores() { return stores_; } /** - * optional uint32 stores = 2; - * *
        ** the number of stores for the region 
        * 
+ * + * optional uint32 stores = 2; */ public Builder setStores(int value) { bitField0_ |= 0x00000002; @@ -5319,11 +5411,11 @@ public final class ClusterStatusProtos { return this; } /** - * optional uint32 stores = 2; - * *
        ** the number of stores for the region 
        * 
+ * + * optional uint32 stores = 2; */ public Builder clearStores() { bitField0_ = (bitField0_ & ~0x00000002); @@ -5332,34 +5424,33 @@ public final class ClusterStatusProtos { return this; } - // optional uint32 storefiles = 3; private int storefiles_ ; /** - * optional uint32 storefiles = 3; - * *
        ** the number of storefiles for the region 
        * 
+ * + * optional uint32 storefiles = 3; */ public boolean hasStorefiles() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * optional uint32 storefiles = 3; - * *
        ** the number of storefiles for the region 
        * 
+ * + * optional uint32 storefiles = 3; */ public int getStorefiles() { return storefiles_; } /** - * optional uint32 storefiles = 3; - * *
        ** the number of storefiles for the region 
        * 
+ * + * optional uint32 storefiles = 3; */ public Builder setStorefiles(int value) { bitField0_ |= 0x00000004; @@ -5368,11 +5459,11 @@ public final class ClusterStatusProtos { return this; } /** - * optional uint32 storefiles = 3; - * *
        ** the number of storefiles for the region 
        * 
+ * + * optional uint32 storefiles = 3; */ public Builder clearStorefiles() { bitField0_ = (bitField0_ & ~0x00000004); @@ -5381,34 +5472,33 @@ public final class ClusterStatusProtos { return this; } - // optional uint32 store_uncompressed_size_MB = 4; private int storeUncompressedSizeMB_ ; /** - * optional uint32 store_uncompressed_size_MB = 4; - * *
        ** the total size of the store files for the region, uncompressed, in MB 
        * 
+ * + * optional uint32 store_uncompressed_size_MB = 4; */ public boolean hasStoreUncompressedSizeMB() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** - * optional uint32 store_uncompressed_size_MB = 4; - * *
        ** the total size of the store files for the region, uncompressed, in MB 
        * 
+ * + * optional uint32 store_uncompressed_size_MB = 4; */ public int getStoreUncompressedSizeMB() { return storeUncompressedSizeMB_; } /** - * optional uint32 store_uncompressed_size_MB = 4; - * *
        ** the total size of the store files for the region, uncompressed, in MB 
        * 
+ * + * optional uint32 store_uncompressed_size_MB = 4; */ public Builder setStoreUncompressedSizeMB(int value) { bitField0_ |= 0x00000008; @@ -5417,11 +5507,11 @@ public final class ClusterStatusProtos { return this; } /** - * optional uint32 store_uncompressed_size_MB = 4; - * *
        ** the total size of the store files for the region, uncompressed, in MB 
        * 
+ * + * optional uint32 store_uncompressed_size_MB = 4; */ public Builder clearStoreUncompressedSizeMB() { bitField0_ = (bitField0_ & ~0x00000008); @@ -5430,34 +5520,33 @@ public final class ClusterStatusProtos { return this; } - // optional uint32 storefile_size_MB = 5; private int storefileSizeMB_ ; /** - * optional uint32 storefile_size_MB = 5; - * *
        ** the current total size of the store files for the region, in MB 
        * 
+ * + * optional uint32 storefile_size_MB = 5; */ public boolean hasStorefileSizeMB() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** - * optional uint32 storefile_size_MB = 5; - * *
        ** the current total size of the store files for the region, in MB 
        * 
+ * + * optional uint32 storefile_size_MB = 5; */ public int getStorefileSizeMB() { return storefileSizeMB_; } /** - * optional uint32 storefile_size_MB = 5; - * *
        ** the current total size of the store files for the region, in MB 
        * 
+ * + * optional uint32 storefile_size_MB = 5; */ public Builder setStorefileSizeMB(int value) { bitField0_ |= 0x00000010; @@ -5466,11 +5555,11 @@ public final class ClusterStatusProtos { return this; } /** - * optional uint32 storefile_size_MB = 5; - * *
        ** the current total size of the store files for the region, in MB 
        * 
+ * + * optional uint32 storefile_size_MB = 5; */ public Builder clearStorefileSizeMB() { bitField0_ = (bitField0_ & ~0x00000010); @@ -5479,34 +5568,33 @@ public final class ClusterStatusProtos { return this; } - // optional uint32 memstore_size_MB = 6; private int memstoreSizeMB_ ; /** - * optional uint32 memstore_size_MB = 6; - * *
        ** the current size of the memstore for the region, in MB 
        * 
+ * + * optional uint32 memstore_size_MB = 6; */ public boolean hasMemstoreSizeMB() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** - * optional uint32 memstore_size_MB = 6; - * *
        ** the current size of the memstore for the region, in MB 
        * 
+ * + * optional uint32 memstore_size_MB = 6; */ public int getMemstoreSizeMB() { return memstoreSizeMB_; } /** - * optional uint32 memstore_size_MB = 6; - * *
        ** the current size of the memstore for the region, in MB 
        * 
+ * + * optional uint32 memstore_size_MB = 6; */ public Builder setMemstoreSizeMB(int value) { bitField0_ |= 0x00000020; @@ -5515,11 +5603,11 @@ public final class ClusterStatusProtos { return this; } /** - * optional uint32 memstore_size_MB = 6; - * *
        ** the current size of the memstore for the region, in MB 
        * 
+ * + * optional uint32 memstore_size_MB = 6; */ public Builder clearMemstoreSizeMB() { bitField0_ = (bitField0_ & ~0x00000020); @@ -5528,40 +5616,39 @@ public final class ClusterStatusProtos { return this; } - // optional uint32 storefile_index_size_MB = 7; private int storefileIndexSizeMB_ ; /** - * optional uint32 storefile_index_size_MB = 7; - * *
        **
        * The current total size of root-level store file indexes for the region,
        * in MB. The same as {@link #rootIndexSizeKB} but in MB.
        * 
+ * + * optional uint32 storefile_index_size_MB = 7; */ public boolean hasStorefileIndexSizeMB() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** - * optional uint32 storefile_index_size_MB = 7; - * *
        **
        * The current total size of root-level store file indexes for the region,
        * in MB. The same as {@link #rootIndexSizeKB} but in MB.
        * 
+ * + * optional uint32 storefile_index_size_MB = 7; */ public int getStorefileIndexSizeMB() { return storefileIndexSizeMB_; } /** - * optional uint32 storefile_index_size_MB = 7; - * *
        **
        * The current total size of root-level store file indexes for the region,
        * in MB. The same as {@link #rootIndexSizeKB} but in MB.
        * 
+ * + * optional uint32 storefile_index_size_MB = 7; */ public Builder setStorefileIndexSizeMB(int value) { bitField0_ |= 0x00000040; @@ -5570,13 +5657,13 @@ public final class ClusterStatusProtos { return this; } /** - * optional uint32 storefile_index_size_MB = 7; - * *
        **
        * The current total size of root-level store file indexes for the region,
        * in MB. The same as {@link #rootIndexSizeKB} but in MB.
        * 
+ * + * optional uint32 storefile_index_size_MB = 7; */ public Builder clearStorefileIndexSizeMB() { bitField0_ = (bitField0_ & ~0x00000040); @@ -5585,34 +5672,33 @@ public final class ClusterStatusProtos { return this; } - // optional uint64 read_requests_count = 8; private long readRequestsCount_ ; /** - * optional uint64 read_requests_count = 8; - * *
        ** the current total read requests made to region 
        * 
+ * + * optional uint64 read_requests_count = 8; */ public boolean hasReadRequestsCount() { return ((bitField0_ & 0x00000080) == 0x00000080); } /** - * optional uint64 read_requests_count = 8; - * *
        ** the current total read requests made to region 
        * 
+ * + * optional uint64 read_requests_count = 8; */ public long getReadRequestsCount() { return readRequestsCount_; } /** - * optional uint64 read_requests_count = 8; - * *
        ** the current total read requests made to region 
        * 
+ * + * optional uint64 read_requests_count = 8; */ public Builder setReadRequestsCount(long value) { bitField0_ |= 0x00000080; @@ -5621,11 +5707,11 @@ public final class ClusterStatusProtos { return this; } /** - * optional uint64 read_requests_count = 8; - * *
        ** the current total read requests made to region 
        * 
+ * + * optional uint64 read_requests_count = 8; */ public Builder clearReadRequestsCount() { bitField0_ = (bitField0_ & ~0x00000080); @@ -5634,34 +5720,33 @@ public final class ClusterStatusProtos { return this; } - // optional uint64 write_requests_count = 9; private long writeRequestsCount_ ; /** - * optional uint64 write_requests_count = 9; - * *
        ** the current total write requests made to region 
        * 
+ * + * optional uint64 write_requests_count = 9; */ public boolean hasWriteRequestsCount() { return ((bitField0_ & 0x00000100) == 0x00000100); } /** - * optional uint64 write_requests_count = 9; - * *
        ** the current total write requests made to region 
        * 
+ * + * optional uint64 write_requests_count = 9; */ public long getWriteRequestsCount() { return writeRequestsCount_; } /** - * optional uint64 write_requests_count = 9; - * *
        ** the current total write requests made to region 
        * 
+ * + * optional uint64 write_requests_count = 9; */ public Builder setWriteRequestsCount(long value) { bitField0_ |= 0x00000100; @@ -5670,11 +5755,11 @@ public final class ClusterStatusProtos { return this; } /** - * optional uint64 write_requests_count = 9; - * *
        ** the current total write requests made to region 
        * 
+ * + * optional uint64 write_requests_count = 9; */ public Builder clearWriteRequestsCount() { bitField0_ = (bitField0_ & ~0x00000100); @@ -5683,34 +5768,33 @@ public final class ClusterStatusProtos { return this; } - // optional uint64 total_compacting_KVs = 10; private long totalCompactingKVs_ ; /** - * optional uint64 total_compacting_KVs = 10; - * *
        ** the total compacting key values in currently running compaction 
        * 
+ * + * optional uint64 total_compacting_KVs = 10; */ public boolean hasTotalCompactingKVs() { return ((bitField0_ & 0x00000200) == 0x00000200); } /** - * optional uint64 total_compacting_KVs = 10; - * *
        ** the total compacting key values in currently running compaction 
        * 
+ * + * optional uint64 total_compacting_KVs = 10; */ public long getTotalCompactingKVs() { return totalCompactingKVs_; } /** - * optional uint64 total_compacting_KVs = 10; - * *
        ** the total compacting key values in currently running compaction 
        * 
+ * + * optional uint64 total_compacting_KVs = 10; */ public Builder setTotalCompactingKVs(long value) { bitField0_ |= 0x00000200; @@ -5719,11 +5803,11 @@ public final class ClusterStatusProtos { return this; } /** - * optional uint64 total_compacting_KVs = 10; - * *
        ** the total compacting key values in currently running compaction 
        * 
+ * + * optional uint64 total_compacting_KVs = 10; */ public Builder clearTotalCompactingKVs() { bitField0_ = (bitField0_ & ~0x00000200); @@ -5732,34 +5816,33 @@ public final class ClusterStatusProtos { return this; } - // optional uint64 current_compacted_KVs = 11; private long currentCompactedKVs_ ; /** - * optional uint64 current_compacted_KVs = 11; - * *
        ** the completed count of key values in currently running compaction 
        * 
+ * + * optional uint64 current_compacted_KVs = 11; */ public boolean hasCurrentCompactedKVs() { return ((bitField0_ & 0x00000400) == 0x00000400); } /** - * optional uint64 current_compacted_KVs = 11; - * *
        ** the completed count of key values in currently running compaction 
        * 
+ * + * optional uint64 current_compacted_KVs = 11; */ public long getCurrentCompactedKVs() { return currentCompactedKVs_; } /** - * optional uint64 current_compacted_KVs = 11; - * *
        ** the completed count of key values in currently running compaction 
        * 
+ * + * optional uint64 current_compacted_KVs = 11; */ public Builder setCurrentCompactedKVs(long value) { bitField0_ |= 0x00000400; @@ -5768,11 +5851,11 @@ public final class ClusterStatusProtos { return this; } /** - * optional uint64 current_compacted_KVs = 11; - * *
        ** the completed count of key values in currently running compaction 
        * 
+ * + * optional uint64 current_compacted_KVs = 11; */ public Builder clearCurrentCompactedKVs() { bitField0_ = (bitField0_ & ~0x00000400); @@ -5781,34 +5864,33 @@ public final class ClusterStatusProtos { return this; } - // optional uint32 root_index_size_KB = 12; private int rootIndexSizeKB_ ; /** - * optional uint32 root_index_size_KB = 12; - * *
        ** The current total size of root-level indexes for the region, in KB. 
        * 
+ * + * optional uint32 root_index_size_KB = 12; */ public boolean hasRootIndexSizeKB() { return ((bitField0_ & 0x00000800) == 0x00000800); } /** - * optional uint32 root_index_size_KB = 12; - * *
        ** The current total size of root-level indexes for the region, in KB. 
        * 
+ * + * optional uint32 root_index_size_KB = 12; */ public int getRootIndexSizeKB() { return rootIndexSizeKB_; } /** - * optional uint32 root_index_size_KB = 12; - * *
        ** The current total size of root-level indexes for the region, in KB. 
        * 
+ * + * optional uint32 root_index_size_KB = 12; */ public Builder setRootIndexSizeKB(int value) { bitField0_ |= 0x00000800; @@ -5817,11 +5899,11 @@ public final class ClusterStatusProtos { return this; } /** - * optional uint32 root_index_size_KB = 12; - * *
        ** The current total size of root-level indexes for the region, in KB. 
        * 
+ * + * optional uint32 root_index_size_KB = 12; */ public Builder clearRootIndexSizeKB() { bitField0_ = (bitField0_ & ~0x00000800); @@ -5830,34 +5912,33 @@ public final class ClusterStatusProtos { return this; } - // optional uint32 total_static_index_size_KB = 13; private int totalStaticIndexSizeKB_ ; /** - * optional uint32 total_static_index_size_KB = 13; - * *
        ** The total size of all index blocks, not just the root level, in KB. 
        * 
+ * + * optional uint32 total_static_index_size_KB = 13; */ public boolean hasTotalStaticIndexSizeKB() { return ((bitField0_ & 0x00001000) == 0x00001000); } /** - * optional uint32 total_static_index_size_KB = 13; - * *
        ** The total size of all index blocks, not just the root level, in KB. 
        * 
+ * + * optional uint32 total_static_index_size_KB = 13; */ public int getTotalStaticIndexSizeKB() { return totalStaticIndexSizeKB_; } /** - * optional uint32 total_static_index_size_KB = 13; - * *
        ** The total size of all index blocks, not just the root level, in KB. 
        * 
+ * + * optional uint32 total_static_index_size_KB = 13; */ public Builder setTotalStaticIndexSizeKB(int value) { bitField0_ |= 0x00001000; @@ -5866,11 +5947,11 @@ public final class ClusterStatusProtos { return this; } /** - * optional uint32 total_static_index_size_KB = 13; - * *
        ** The total size of all index blocks, not just the root level, in KB. 
        * 
+ * + * optional uint32 total_static_index_size_KB = 13; */ public Builder clearTotalStaticIndexSizeKB() { bitField0_ = (bitField0_ & ~0x00001000); @@ -5879,40 +5960,39 @@ public final class ClusterStatusProtos { return this; } - // optional uint32 total_static_bloom_size_KB = 14; private int totalStaticBloomSizeKB_ ; /** - * optional uint32 total_static_bloom_size_KB = 14; - * *
        **
        * The total size of all Bloom filter blocks, not just loaded into the
        * block cache, in KB.
        * 
+ * + * optional uint32 total_static_bloom_size_KB = 14; */ public boolean hasTotalStaticBloomSizeKB() { return ((bitField0_ & 0x00002000) == 0x00002000); } /** - * optional uint32 total_static_bloom_size_KB = 14; - * *
        **
        * The total size of all Bloom filter blocks, not just loaded into the
        * block cache, in KB.
        * 
+ * + * optional uint32 total_static_bloom_size_KB = 14; */ public int getTotalStaticBloomSizeKB() { return totalStaticBloomSizeKB_; } /** - * optional uint32 total_static_bloom_size_KB = 14; - * *
        **
        * The total size of all Bloom filter blocks, not just loaded into the
        * block cache, in KB.
        * 
+ * + * optional uint32 total_static_bloom_size_KB = 14; */ public Builder setTotalStaticBloomSizeKB(int value) { bitField0_ |= 0x00002000; @@ -5921,13 +6001,13 @@ public final class ClusterStatusProtos { return this; } /** - * optional uint32 total_static_bloom_size_KB = 14; - * *
        **
        * The total size of all Bloom filter blocks, not just loaded into the
        * block cache, in KB.
        * 
+ * + * optional uint32 total_static_bloom_size_KB = 14; */ public Builder clearTotalStaticBloomSizeKB() { bitField0_ = (bitField0_ & ~0x00002000); @@ -5936,34 +6016,33 @@ public final class ClusterStatusProtos { return this; } - // optional uint64 complete_sequence_id = 15; private long completeSequenceId_ ; /** - * optional uint64 complete_sequence_id = 15; - * *
        ** the most recent sequence Id from cache flush 
        * 
+ * + * optional uint64 complete_sequence_id = 15; */ public boolean hasCompleteSequenceId() { return ((bitField0_ & 0x00004000) == 0x00004000); } /** - * optional uint64 complete_sequence_id = 15; - * *
        ** the most recent sequence Id from cache flush 
        * 
+ * + * optional uint64 complete_sequence_id = 15; */ public long getCompleteSequenceId() { return completeSequenceId_; } /** - * optional uint64 complete_sequence_id = 15; - * *
        ** the most recent sequence Id from cache flush 
        * 
+ * + * optional uint64 complete_sequence_id = 15; */ public Builder setCompleteSequenceId(long value) { bitField0_ |= 0x00004000; @@ -5972,11 +6051,11 @@ public final class ClusterStatusProtos { return this; } /** - * optional uint64 complete_sequence_id = 15; - * *
        ** the most recent sequence Id from cache flush 
        * 
+ * + * optional uint64 complete_sequence_id = 15; */ public Builder clearCompleteSequenceId() { bitField0_ = (bitField0_ & ~0x00004000); @@ -5985,34 +6064,33 @@ public final class ClusterStatusProtos { return this; } - // optional float data_locality = 16; private float dataLocality_ ; /** - * optional float data_locality = 16; - * *
        ** The current data locality for region in the regionserver 
        * 
+ * + * optional float data_locality = 16; */ public boolean hasDataLocality() { return ((bitField0_ & 0x00008000) == 0x00008000); } /** - * optional float data_locality = 16; - * *
        ** The current data locality for region in the regionserver 
        * 
+ * + * optional float data_locality = 16; */ public float getDataLocality() { return dataLocality_; } /** - * optional float data_locality = 16; - * *
        ** The current data locality for region in the regionserver 
        * 
+ * + * optional float data_locality = 16; */ public Builder setDataLocality(float value) { bitField0_ |= 0x00008000; @@ -6021,11 +6099,11 @@ public final class ClusterStatusProtos { return this; } /** - * optional float data_locality = 16; - * *
        ** The current data locality for region in the regionserver 
        * 
+ * + * optional float data_locality = 16; */ public Builder clearDataLocality() { bitField0_ = (bitField0_ & ~0x00008000); @@ -6034,7 +6112,6 @@ public final class ClusterStatusProtos { return this; } - // optional uint64 last_major_compaction_ts = 17 [default = 0]; private long lastMajorCompactionTs_ ; /** * optional uint64 last_major_compaction_ts = 17 [default = 0]; @@ -6067,7 +6144,6 @@ public final class ClusterStatusProtos { return this; } - // repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; private java.util.List storeCompleteSequenceId_ = java.util.Collections.emptyList(); private void ensureStoreCompleteSequenceIdIsMutable() { @@ -6077,15 +6153,15 @@ public final class ClusterStatusProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceIdOrBuilder> storeCompleteSequenceIdBuilder_; /** - * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; - * *
        ** the most recent sequence Id of store from cache flush 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; */ public java.util.List getStoreCompleteSequenceIdList() { if (storeCompleteSequenceIdBuilder_ == null) { @@ -6095,11 +6171,11 @@ public final class ClusterStatusProtos { } } /** - * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; - * *
        ** the most recent sequence Id of store from cache flush 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; */ public int getStoreCompleteSequenceIdCount() { if (storeCompleteSequenceIdBuilder_ == null) { @@ -6109,11 +6185,11 @@ public final class ClusterStatusProtos { } } /** - * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; - * *
        ** the most recent sequence Id of store from cache flush 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId getStoreCompleteSequenceId(int index) { if (storeCompleteSequenceIdBuilder_ == null) { @@ -6123,11 +6199,11 @@ public final class ClusterStatusProtos { } } /** - * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; - * *
        ** the most recent sequence Id of store from cache flush 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; */ public Builder setStoreCompleteSequenceId( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId value) { @@ -6144,11 +6220,11 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; - * *
        ** the most recent sequence Id of store from cache flush 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; */ public Builder setStoreCompleteSequenceId( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder builderForValue) { @@ -6162,11 +6238,11 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; - * *
        ** the most recent sequence Id of store from cache flush 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; */ public Builder addStoreCompleteSequenceId(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId value) { if (storeCompleteSequenceIdBuilder_ == null) { @@ -6182,11 +6258,11 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; - * *
        ** the most recent sequence Id of store from cache flush 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; */ public Builder addStoreCompleteSequenceId( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId value) { @@ -6203,11 +6279,11 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; - * *
        ** the most recent sequence Id of store from cache flush 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; */ public Builder addStoreCompleteSequenceId( org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder builderForValue) { @@ -6221,11 +6297,11 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; - * *
        ** the most recent sequence Id of store from cache flush 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; */ public Builder addStoreCompleteSequenceId( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder builderForValue) { @@ -6239,17 +6315,18 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; - * *
        ** the most recent sequence Id of store from cache flush 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; */ public Builder addAllStoreCompleteSequenceId( java.lang.Iterable values) { if (storeCompleteSequenceIdBuilder_ == null) { ensureStoreCompleteSequenceIdIsMutable(); - super.addAll(values, storeCompleteSequenceId_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, storeCompleteSequenceId_); onChanged(); } else { storeCompleteSequenceIdBuilder_.addAllMessages(values); @@ -6257,11 +6334,11 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; - * *
        ** the most recent sequence Id of store from cache flush 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; */ public Builder clearStoreCompleteSequenceId() { if (storeCompleteSequenceIdBuilder_ == null) { @@ -6274,11 +6351,11 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; - * *
        ** the most recent sequence Id of store from cache flush 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; */ public Builder removeStoreCompleteSequenceId(int index) { if (storeCompleteSequenceIdBuilder_ == null) { @@ -6291,22 +6368,22 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; - * *
        ** the most recent sequence Id of store from cache flush 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder getStoreCompleteSequenceIdBuilder( int index) { return getStoreCompleteSequenceIdFieldBuilder().getBuilder(index); } /** - * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; - * *
        ** the most recent sequence Id of store from cache flush 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceIdOrBuilder getStoreCompleteSequenceIdOrBuilder( int index) { @@ -6316,11 +6393,11 @@ public final class ClusterStatusProtos { } } /** - * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; - * *
        ** the most recent sequence Id of store from cache flush 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; */ public java.util.List getStoreCompleteSequenceIdOrBuilderList() { @@ -6331,22 +6408,22 @@ public final class ClusterStatusProtos { } } /** - * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; - * *
        ** the most recent sequence Id of store from cache flush 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder addStoreCompleteSequenceIdBuilder() { return getStoreCompleteSequenceIdFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.getDefaultInstance()); } /** - * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; - * *
        ** the most recent sequence Id of store from cache flush 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder addStoreCompleteSequenceIdBuilder( int index) { @@ -6354,21 +6431,21 @@ public final class ClusterStatusProtos { index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.getDefaultInstance()); } /** - * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; - * *
        ** the most recent sequence Id of store from cache flush 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18; */ public java.util.List getStoreCompleteSequenceIdBuilderList() { return getStoreCompleteSequenceIdFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceIdOrBuilder> getStoreCompleteSequenceIdFieldBuilder() { if (storeCompleteSequenceIdBuilder_ == null) { - storeCompleteSequenceIdBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + storeCompleteSequenceIdBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceIdOrBuilder>( storeCompleteSequenceId_, ((bitField0_ & 0x00020000) == 0x00020000), @@ -6379,34 +6456,33 @@ public final class ClusterStatusProtos { return storeCompleteSequenceIdBuilder_; } - // optional uint64 filtered_read_requests_count = 19; private long filteredReadRequestsCount_ ; /** - * optional uint64 filtered_read_requests_count = 19; - * *
        ** the current total filtered read requests made to region 
        * 
+ * + * optional uint64 filtered_read_requests_count = 19; */ public boolean hasFilteredReadRequestsCount() { return ((bitField0_ & 0x00040000) == 0x00040000); } /** - * optional uint64 filtered_read_requests_count = 19; - * *
        ** the current total filtered read requests made to region 
        * 
+ * + * optional uint64 filtered_read_requests_count = 19; */ public long getFilteredReadRequestsCount() { return filteredReadRequestsCount_; } /** - * optional uint64 filtered_read_requests_count = 19; - * *
        ** the current total filtered read requests made to region 
        * 
+ * + * optional uint64 filtered_read_requests_count = 19; */ public Builder setFilteredReadRequestsCount(long value) { bitField0_ |= 0x00040000; @@ -6415,11 +6491,11 @@ public final class ClusterStatusProtos { return this; } /** - * optional uint64 filtered_read_requests_count = 19; - * *
        ** the current total filtered read requests made to region 
        * 
+ * + * optional uint64 filtered_read_requests_count = 19; */ public Builder clearFilteredReadRequestsCount() { bitField0_ = (bitField0_ & ~0x00040000); @@ -6427,22 +6503,59 @@ public final class ClusterStatusProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RegionLoad) } + // @@protoc_insertion_point(class_scope:hbase.pb.RegionLoad) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad DEFAULT_INSTANCE; static { - defaultInstance = new RegionLoad(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RegionLoad parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionLoad(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.RegionLoad) } - public interface ReplicationLoadSinkOrBuilder 
- extends com.google.protobuf.MessageOrBuilder { + public interface ReplicationLoadSinkOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ReplicationLoadSink) + com.google.protobuf.MessageOrBuilder { - // required uint64 ageOfLastAppliedOp = 1; /** * required uint64 ageOfLastAppliedOp = 1; */ @@ -6452,7 +6565,6 @@ public final class ClusterStatusProtos { */ long getAgeOfLastAppliedOp(); - // required uint64 timeStampsOfLastAppliedOp = 2; /** * required uint64 timeStampsOfLastAppliedOp = 2; */ @@ -6465,36 +6577,29 @@ public final class ClusterStatusProtos { /** * Protobuf type {@code hbase.pb.ReplicationLoadSink} */ - public static final class ReplicationLoadSink extends - com.google.protobuf.GeneratedMessage - implements ReplicationLoadSinkOrBuilder { + public static final class ReplicationLoadSink extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ReplicationLoadSink) + ReplicationLoadSinkOrBuilder { // Use ReplicationLoadSink.newBuilder() to construct. 
- private ReplicationLoadSink(com.google.protobuf.GeneratedMessage.Builder builder) { + private ReplicationLoadSink(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ReplicationLoadSink(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ReplicationLoadSink defaultInstance; - public static ReplicationLoadSink getDefaultInstance() { - return defaultInstance; } - - public ReplicationLoadSink getDefaultInstanceForType() { - return defaultInstance; + private ReplicationLoadSink() { + ageOfLastAppliedOp_ = 0L; + timeStampsOfLastAppliedOp_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ReplicationLoadSink( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -6529,7 +6634,7 @@ public final class ClusterStatusProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -6540,30 +6645,14 @@ public final class ClusterStatusProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_ReplicationLoadSink_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_ReplicationLoadSink_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ReplicationLoadSink parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ReplicationLoadSink(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required uint64 ageOfLastAppliedOp = 1; public static final int AGEOFLASTAPPLIEDOP_FIELD_NUMBER = 1; private long ageOfLastAppliedOp_; /** @@ -6579,7 +6668,6 @@ public final class ClusterStatusProtos { return ageOfLastAppliedOp_; } - // required uint64 timeStampsOfLastAppliedOp = 2; public static final int TIMESTAMPSOFLASTAPPLIEDOP_FIELD_NUMBER = 2; private long timeStampsOfLastAppliedOp_; /** @@ -6595,14 +6683,11 @@ public final class ClusterStatusProtos { return timeStampsOfLastAppliedOp_; } - private void initFields() { - ageOfLastAppliedOp_ = 0L; - timeStampsOfLastAppliedOp_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasAgeOfLastAppliedOp()) { memoizedIsInitialized = 0; @@ -6618,19 +6703,17 @@ public final class ClusterStatusProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws 
java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, ageOfLastAppliedOp_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt64(2, timeStampsOfLastAppliedOp_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -6642,19 +6725,13 @@ public final class ClusterStatusProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(2, timeStampsOfLastAppliedOp_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -6675,12 +6752,10 @@ public final class ClusterStatusProtos { result = result && (getTimeStampsOfLastAppliedOp() == other.getTimeStampsOfLastAppliedOp()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -6690,13 +6765,15 @@ public final class ClusterStatusProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasAgeOfLastAppliedOp()) { hash = (37 * hash) + AGEOFLASTAPPLIEDOP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getAgeOfLastAppliedOp()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getAgeOfLastAppliedOp()); } if (hasTimeStampsOfLastAppliedOp()) { hash = (37 * hash) + 
TIMESTAMPSOFLASTAPPLIEDOP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getTimeStampsOfLastAppliedOp()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getTimeStampsOfLastAppliedOp()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -6724,46 +6801,57 @@ public final class ClusterStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink parseFrom( com.google.protobuf.CodedInputStream 
input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -6771,14 +6859,15 @@ public final class ClusterStatusProtos { * Protobuf type {@code hbase.pb.ReplicationLoadSink} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSinkOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ReplicationLoadSink) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSinkOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_ReplicationLoadSink_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_ReplicationLoadSink_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -6791,18 +6880,15 @@ public final class ClusterStatusProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); 
- } - public Builder clear() { super.clear(); ageOfLastAppliedOp_ = 0L; @@ -6812,10 +6898,6 @@ public final class ClusterStatusProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_ReplicationLoadSink_descriptor; @@ -6850,6 +6932,32 @@ public final class ClusterStatusProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink)other); @@ -6867,17 +6975,16 @@ public final class ClusterStatusProtos { if (other.hasTimeStampsOfLastAppliedOp()) { setTimeStampsOfLastAppliedOp(other.getTimeStampsOfLastAppliedOp()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean 
isInitialized() { if (!hasAgeOfLastAppliedOp()) { - return false; } if (!hasTimeStampsOfLastAppliedOp()) { - return false; } return true; @@ -6892,7 +6999,7 @@ public final class ClusterStatusProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -6902,7 +7009,6 @@ public final class ClusterStatusProtos { } private int bitField0_; - // required uint64 ageOfLastAppliedOp = 1; private long ageOfLastAppliedOp_ ; /** * required uint64 ageOfLastAppliedOp = 1; @@ -6935,7 +7041,6 @@ public final class ClusterStatusProtos { return this; } - // required uint64 timeStampsOfLastAppliedOp = 2; private long timeStampsOfLastAppliedOp_ ; /** * required uint64 timeStampsOfLastAppliedOp = 2; @@ -6967,22 +7072,59 @@ public final class ClusterStatusProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ReplicationLoadSink) } + // @@protoc_insertion_point(class_scope:hbase.pb.ReplicationLoadSink) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink DEFAULT_INSTANCE; static { - defaultInstance = new ReplicationLoadSink(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink(); + } + + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ReplicationLoadSink parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ReplicationLoadSink(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ReplicationLoadSink) } - public interface ReplicationLoadSourceOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ReplicationLoadSourceOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ReplicationLoadSource) + com.google.protobuf.MessageOrBuilder { - // required string peerID = 1; /** * required string peerID = 1; */ @@ -6997,7 +7139,6 @@ public final class ClusterStatusProtos { com.google.protobuf.ByteString getPeerIDBytes(); - // required uint64 ageOfLastShippedOp = 2; /** * required uint64 ageOfLastShippedOp = 2; */ @@ -7007,7 +7148,6 @@ public final class ClusterStatusProtos { */ long getAgeOfLastShippedOp(); - // required uint32 sizeOfLogQueue = 3; /** * required uint32 sizeOfLogQueue = 3; */ @@ -7017,7 +7157,6 @@ public final class ClusterStatusProtos { */ int getSizeOfLogQueue(); - // required uint64 timeStampOfLastShippedOp = 4; /** * required uint64 timeStampOfLastShippedOp = 4; */ @@ -7027,7 +7166,6 @@ public final class ClusterStatusProtos { */ long 
getTimeStampOfLastShippedOp(); - // required uint64 replicationLag = 5; /** * required uint64 replicationLag = 5; */ @@ -7040,36 +7178,32 @@ public final class ClusterStatusProtos { /** * Protobuf type {@code hbase.pb.ReplicationLoadSource} */ - public static final class ReplicationLoadSource extends - com.google.protobuf.GeneratedMessage - implements ReplicationLoadSourceOrBuilder { + public static final class ReplicationLoadSource extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ReplicationLoadSource) + ReplicationLoadSourceOrBuilder { // Use ReplicationLoadSource.newBuilder() to construct. - private ReplicationLoadSource(com.google.protobuf.GeneratedMessage.Builder builder) { + private ReplicationLoadSource(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ReplicationLoadSource(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ReplicationLoadSource defaultInstance; - public static ReplicationLoadSource getDefaultInstance() { - return defaultInstance; } - - public ReplicationLoadSource getDefaultInstanceForType() { - return defaultInstance; + private ReplicationLoadSource() { + peerID_ = ""; + ageOfLastShippedOp_ = 0L; + sizeOfLogQueue_ = 0; + timeStampOfLastShippedOp_ = 0L; + replicationLag_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ReplicationLoadSource( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = 
com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -7089,8 +7223,9 @@ public final class ClusterStatusProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - peerID_ = input.readBytes(); + peerID_ = bs; break; } case 16: { @@ -7119,7 +7254,7 @@ public final class ClusterStatusProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -7130,32 +7265,16 @@ public final class ClusterStatusProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_ReplicationLoadSource_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_ReplicationLoadSource_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ReplicationLoadSource parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ReplicationLoadSource(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string peerID = 1; public static final int PEERID_FIELD_NUMBER = 1; 
- private java.lang.Object peerID_; + private volatile java.lang.Object peerID_; /** * required string peerID = 1; */ @@ -7196,7 +7315,6 @@ public final class ClusterStatusProtos { } } - // required uint64 ageOfLastShippedOp = 2; public static final int AGEOFLASTSHIPPEDOP_FIELD_NUMBER = 2; private long ageOfLastShippedOp_; /** @@ -7212,7 +7330,6 @@ public final class ClusterStatusProtos { return ageOfLastShippedOp_; } - // required uint32 sizeOfLogQueue = 3; public static final int SIZEOFLOGQUEUE_FIELD_NUMBER = 3; private int sizeOfLogQueue_; /** @@ -7228,7 +7345,6 @@ public final class ClusterStatusProtos { return sizeOfLogQueue_; } - // required uint64 timeStampOfLastShippedOp = 4; public static final int TIMESTAMPOFLASTSHIPPEDOP_FIELD_NUMBER = 4; private long timeStampOfLastShippedOp_; /** @@ -7244,7 +7360,6 @@ public final class ClusterStatusProtos { return timeStampOfLastShippedOp_; } - // required uint64 replicationLag = 5; public static final int REPLICATIONLAG_FIELD_NUMBER = 5; private long replicationLag_; /** @@ -7260,17 +7375,11 @@ public final class ClusterStatusProtos { return replicationLag_; } - private void initFields() { - peerID_ = ""; - ageOfLastShippedOp_ = 0L; - sizeOfLogQueue_ = 0; - timeStampOfLastShippedOp_ = 0L; - replicationLag_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasPeerID()) { memoizedIsInitialized = 0; @@ -7298,9 +7407,8 @@ public final class ClusterStatusProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getPeerIDBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, peerID_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { 
output.writeUInt64(2, ageOfLastShippedOp_); @@ -7314,18 +7422,16 @@ public final class ClusterStatusProtos { if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeUInt64(5, replicationLag_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getPeerIDBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, peerID_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream @@ -7343,19 +7449,13 @@ public final class ClusterStatusProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(5, replicationLag_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -7391,12 +7491,10 @@ public final class ClusterStatusProtos { result = result && (getReplicationLag() == other.getReplicationLag()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -7410,7 +7508,8 @@ public final class ClusterStatusProtos { } if (hasAgeOfLastShippedOp()) { hash = (37 * hash) + AGEOFLASTSHIPPEDOP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getAgeOfLastShippedOp()); + 
hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getAgeOfLastShippedOp()); } if (hasSizeOfLogQueue()) { hash = (37 * hash) + SIZEOFLOGQUEUE_FIELD_NUMBER; @@ -7418,13 +7517,15 @@ public final class ClusterStatusProtos { } if (hasTimeStampOfLastShippedOp()) { hash = (37 * hash) + TIMESTAMPOFLASTSHIPPEDOP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getTimeStampOfLastShippedOp()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getTimeStampOfLastShippedOp()); } if (hasReplicationLag()) { hash = (37 * hash) + REPLICATIONLAG_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getReplicationLag()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getReplicationLag()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -7452,46 +7553,57 @@ public final class ClusterStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -7499,14 +7611,15 @@ public final class ClusterStatusProtos { * Protobuf type {@code hbase.pb.ReplicationLoadSource} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSourceOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ReplicationLoadSource) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSourceOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_ReplicationLoadSource_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_ReplicationLoadSource_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -7519,18 +7632,15 @@ public final class ClusterStatusProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new 
Builder(); - } - public Builder clear() { super.clear(); peerID_ = ""; @@ -7546,10 +7656,6 @@ public final class ClusterStatusProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_ReplicationLoadSource_descriptor; @@ -7596,6 +7702,32 @@ public final class ClusterStatusProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource)other); @@ -7624,29 +7756,25 @@ public final class ClusterStatusProtos { if (other.hasReplicationLag()) { setReplicationLag(other.getReplicationLag()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if 
(!hasPeerID()) { - return false; } if (!hasAgeOfLastShippedOp()) { - return false; } if (!hasSizeOfLogQueue()) { - return false; } if (!hasTimeStampOfLastShippedOp()) { - return false; } if (!hasReplicationLag()) { - return false; } return true; @@ -7661,7 +7789,7 @@ public final class ClusterStatusProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -7671,7 +7799,6 @@ public final class ClusterStatusProtos { } private int bitField0_; - // required string peerID = 1; private java.lang.Object peerID_ = ""; /** * required string peerID = 1; @@ -7685,9 +7812,12 @@ public final class ClusterStatusProtos { public java.lang.String getPeerID() { java.lang.Object ref = peerID_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - peerID_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + peerID_ = s; + } return s; } else { return (java.lang.String) ref; @@ -7745,7 +7875,6 @@ public final class ClusterStatusProtos { return this; } - // required uint64 ageOfLastShippedOp = 2; private long ageOfLastShippedOp_ ; /** * required uint64 ageOfLastShippedOp = 2; @@ -7778,7 +7907,6 @@ public final class ClusterStatusProtos { return this; } - // required uint32 sizeOfLogQueue = 3; private int sizeOfLogQueue_ ; /** * required uint32 sizeOfLogQueue = 3; @@ -7811,7 +7939,6 @@ public final class ClusterStatusProtos { return this; } - // required uint64 timeStampOfLastShippedOp = 4; private long timeStampOfLastShippedOp_ ; /** * required uint64 timeStampOfLastShippedOp = 4; @@ -7844,7 
+7971,6 @@ public final class ClusterStatusProtos { return this; } - // required uint64 replicationLag = 5; private long replicationLag_ ; /** * required uint64 replicationLag = 5; @@ -7876,378 +8002,406 @@ public final class ClusterStatusProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ReplicationLoadSource) } + // @@protoc_insertion_point(class_scope:hbase.pb.ReplicationLoadSource) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource DEFAULT_INSTANCE; static { - defaultInstance = new ReplicationLoadSource(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ReplicationLoadSource parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ReplicationLoadSource(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource getDefaultInstanceForType() { + return 
DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ReplicationLoadSource) } - public interface ServerLoadOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ServerLoadOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ServerLoad) + com.google.protobuf.MessageOrBuilder { - // optional uint64 number_of_requests = 1; /** - * optional uint64 number_of_requests = 1; - * *
      ** Number of requests since last report. 
      * 
+ * + * optional uint64 number_of_requests = 1; */ boolean hasNumberOfRequests(); /** - * optional uint64 number_of_requests = 1; - * *
      ** Number of requests since last report. 
      * 
+ * + * optional uint64 number_of_requests = 1; */ long getNumberOfRequests(); - // optional uint64 total_number_of_requests = 2; /** - * optional uint64 total_number_of_requests = 2; - * *
      ** Total Number of requests from the start of the region server. 
      * 
+ * + * optional uint64 total_number_of_requests = 2; */ boolean hasTotalNumberOfRequests(); /** - * optional uint64 total_number_of_requests = 2; - * *
      ** Total Number of requests from the start of the region server. 
      * 
+ * + * optional uint64 total_number_of_requests = 2; */ long getTotalNumberOfRequests(); - // optional uint32 used_heap_MB = 3; /** - * optional uint32 used_heap_MB = 3; - * *
      ** the amount of used heap, in MB. 
      * 
+ * + * optional uint32 used_heap_MB = 3; */ boolean hasUsedHeapMB(); /** - * optional uint32 used_heap_MB = 3; - * *
      ** the amount of used heap, in MB. 
      * 
+ * + * optional uint32 used_heap_MB = 3; */ int getUsedHeapMB(); - // optional uint32 max_heap_MB = 4; /** - * optional uint32 max_heap_MB = 4; - * *
      ** the maximum allowable size of the heap, in MB. 
      * 
+ * + * optional uint32 max_heap_MB = 4; */ boolean hasMaxHeapMB(); /** - * optional uint32 max_heap_MB = 4; - * *
      ** the maximum allowable size of the heap, in MB. 
      * 
+ * + * optional uint32 max_heap_MB = 4; */ int getMaxHeapMB(); - // repeated .hbase.pb.RegionLoad region_loads = 5; /** - * repeated .hbase.pb.RegionLoad region_loads = 5; - * *
      ** Information on the load of individual regions. 
      * 
+ * + * repeated .hbase.pb.RegionLoad region_loads = 5; */ java.util.List getRegionLoadsList(); /** - * repeated .hbase.pb.RegionLoad region_loads = 5; - * *
      ** Information on the load of individual regions. 
      * 
+ * + * repeated .hbase.pb.RegionLoad region_loads = 5; */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad getRegionLoads(int index); /** - * repeated .hbase.pb.RegionLoad region_loads = 5; - * *
      ** Information on the load of individual regions. 
      * 
+ * + * repeated .hbase.pb.RegionLoad region_loads = 5; */ int getRegionLoadsCount(); /** - * repeated .hbase.pb.RegionLoad region_loads = 5; - * *
      ** Information on the load of individual regions. 
      * 
+ * + * repeated .hbase.pb.RegionLoad region_loads = 5; */ java.util.List getRegionLoadsOrBuilderList(); /** - * repeated .hbase.pb.RegionLoad region_loads = 5; - * *
      ** Information on the load of individual regions. 
      * 
+ * + * repeated .hbase.pb.RegionLoad region_loads = 5; */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoadOrBuilder getRegionLoadsOrBuilder( int index); - // repeated .hbase.pb.Coprocessor coprocessors = 6; /** - * repeated .hbase.pb.Coprocessor coprocessors = 6; - * *
      **
      * Regionserver-level coprocessors, e.g., WALObserver implementations.
      * Region-level coprocessors, on the other hand, are stored inside RegionLoad
      * objects.
      * 
+ * + * repeated .hbase.pb.Coprocessor coprocessors = 6; */ java.util.List getCoprocessorsList(); /** - * repeated .hbase.pb.Coprocessor coprocessors = 6; - * *
      **
      * Regionserver-level coprocessors, e.g., WALObserver implementations.
      * Region-level coprocessors, on the other hand, are stored inside RegionLoad
      * objects.
      * 
+ * + * repeated .hbase.pb.Coprocessor coprocessors = 6; */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor getCoprocessors(int index); /** - * repeated .hbase.pb.Coprocessor coprocessors = 6; - * *
      **
      * Regionserver-level coprocessors, e.g., WALObserver implementations.
      * Region-level coprocessors, on the other hand, are stored inside RegionLoad
      * objects.
      * 
+ * + * repeated .hbase.pb.Coprocessor coprocessors = 6; */ int getCoprocessorsCount(); /** - * repeated .hbase.pb.Coprocessor coprocessors = 6; - * *
      **
      * Regionserver-level coprocessors, e.g., WALObserver implementations.
      * Region-level coprocessors, on the other hand, are stored inside RegionLoad
      * objects.
      * 
+ * + * repeated .hbase.pb.Coprocessor coprocessors = 6; */ java.util.List getCoprocessorsOrBuilderList(); /** - * repeated .hbase.pb.Coprocessor coprocessors = 6; - * *
      **
      * Regionserver-level coprocessors, e.g., WALObserver implementations.
      * Region-level coprocessors, on the other hand, are stored inside RegionLoad
      * objects.
      * 
+ * + * repeated .hbase.pb.Coprocessor coprocessors = 6; */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getCoprocessorsOrBuilder( int index); - // optional uint64 report_start_time = 7; /** - * optional uint64 report_start_time = 7; - * *
      **
      * Time when incremental (non-total) counts began being calculated (e.g. number_of_requests)
      * time is measured as the difference, measured in milliseconds, between the current time
      * and midnight, January 1, 1970 UTC.
      * 
+ * + * optional uint64 report_start_time = 7; */ boolean hasReportStartTime(); /** - * optional uint64 report_start_time = 7; - * *
      **
      * Time when incremental (non-total) counts began being calculated (e.g. number_of_requests)
      * time is measured as the difference, measured in milliseconds, between the current time
      * and midnight, January 1, 1970 UTC.
      * 
+ * + * optional uint64 report_start_time = 7; */ long getReportStartTime(); - // optional uint64 report_end_time = 8; /** - * optional uint64 report_end_time = 8; - * *
      **
      * Time when report was generated.
      * time is measured as the difference, measured in milliseconds, between the current time
      * and midnight, January 1, 1970 UTC.
      * 
+ * + * optional uint64 report_end_time = 8; */ boolean hasReportEndTime(); /** - * optional uint64 report_end_time = 8; - * *
      **
      * Time when report was generated.
      * time is measured as the difference, measured in milliseconds, between the current time
      * and midnight, January 1, 1970 UTC.
      * 
+ * + * optional uint64 report_end_time = 8; */ long getReportEndTime(); - // optional uint32 info_server_port = 9; /** - * optional uint32 info_server_port = 9; - * *
      **
      * The port number that this region server is hosing an info server on.
      * 
+ * + * optional uint32 info_server_port = 9; */ boolean hasInfoServerPort(); /** - * optional uint32 info_server_port = 9; - * *
      **
      * The port number that this region server is hosing an info server on.
      * 
+ * + * optional uint32 info_server_port = 9; */ int getInfoServerPort(); - // repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; /** - * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; - * *
      **
      * The replicationLoadSource for the replication Source status of this region server.
      * 
+ * + * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; */ java.util.List getReplLoadSourceList(); /** - * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; - * *
      **
      * The replicationLoadSource for the replication Source status of this region server.
      * 
+ * + * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource getReplLoadSource(int index); /** - * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; - * *
      **
      * The replicationLoadSource for the replication Source status of this region server.
      * 
+ * + * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; */ int getReplLoadSourceCount(); /** - * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; - * *
      **
      * The replicationLoadSource for the replication Source status of this region server.
      * 
+ * + * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; */ java.util.List getReplLoadSourceOrBuilderList(); /** - * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; - * *
      **
      * The replicationLoadSource for the replication Source status of this region server.
      * 
+ * + * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSourceOrBuilder getReplLoadSourceOrBuilder( int index); - // optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; /** - * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; - * *
      **
      * The replicationLoadSink for the replication Sink status of this region server.
      * 
+ * + * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; */ boolean hasReplLoadSink(); /** - * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; - * *
      **
      * The replicationLoadSink for the replication Sink status of this region server.
      * 
+ * + * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink getReplLoadSink(); /** - * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; - * *
      **
      * The replicationLoadSink for the replication Sink status of this region server.
      * 
+ * + * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSinkOrBuilder getReplLoadSinkOrBuilder(); } /** * Protobuf type {@code hbase.pb.ServerLoad} */ - public static final class ServerLoad extends - com.google.protobuf.GeneratedMessage - implements ServerLoadOrBuilder { + public static final class ServerLoad extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ServerLoad) + ServerLoadOrBuilder { // Use ServerLoad.newBuilder() to construct. - private ServerLoad(com.google.protobuf.GeneratedMessage.Builder builder) { + private ServerLoad(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ServerLoad(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ServerLoad defaultInstance; - public static ServerLoad getDefaultInstance() { - return defaultInstance; } - - public ServerLoad getDefaultInstanceForType() { - return defaultInstance; + private ServerLoad() { + numberOfRequests_ = 0L; + totalNumberOfRequests_ = 0L; + usedHeapMB_ = 0; + maxHeapMB_ = 0; + regionLoads_ = java.util.Collections.emptyList(); + coprocessors_ = java.util.Collections.emptyList(); + reportStartTime_ = 0L; + reportEndTime_ = 0L; + infoServerPort_ = 0; + replLoadSource_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ServerLoad( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; 
com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -8291,7 +8445,8 @@ public final class ClusterStatusProtos { regionLoads_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000010; } - regionLoads_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad.PARSER, extensionRegistry)); + regionLoads_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad.PARSER, extensionRegistry)); break; } case 50: { @@ -8299,7 +8454,8 @@ public final class ClusterStatusProtos { coprocessors_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000020; } - coprocessors_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor.PARSER, extensionRegistry)); + coprocessors_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor.PARSER, extensionRegistry)); break; } case 56: { @@ -8322,7 +8478,8 @@ public final class ClusterStatusProtos { replLoadSource_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000200; } - replLoadSource_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource.PARSER, extensionRegistry)); + replLoadSource_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource.PARSER, extensionRegistry)); break; } case 90: { @@ -8344,7 +8501,7 @@ public final class ClusterStatusProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) { regionLoads_ = java.util.Collections.unmodifiableList(regionLoads_); @@ -8364,453 +8521,415 @@ public final class ClusterStatusProtos { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_ServerLoad_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_ServerLoad_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ServerLoad parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ServerLoad(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional uint64 number_of_requests = 1; public static final int NUMBER_OF_REQUESTS_FIELD_NUMBER = 1; private long numberOfRequests_; /** - * optional uint64 number_of_requests = 1; - * *
      ** Number of requests since last report. 
      * 
+ * + * optional uint64 number_of_requests = 1; */ public boolean hasNumberOfRequests() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional uint64 number_of_requests = 1; - * *
      ** Number of requests since last report. 
      * 
+ * + * optional uint64 number_of_requests = 1; */ public long getNumberOfRequests() { return numberOfRequests_; } - // optional uint64 total_number_of_requests = 2; public static final int TOTAL_NUMBER_OF_REQUESTS_FIELD_NUMBER = 2; private long totalNumberOfRequests_; /** - * optional uint64 total_number_of_requests = 2; - * *
      ** Total Number of requests from the start of the region server. 
      * 
+ * + * optional uint64 total_number_of_requests = 2; */ public boolean hasTotalNumberOfRequests() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional uint64 total_number_of_requests = 2; - * *
      ** Total Number of requests from the start of the region server. 
      * 
+ * + * optional uint64 total_number_of_requests = 2; */ public long getTotalNumberOfRequests() { return totalNumberOfRequests_; } - // optional uint32 used_heap_MB = 3; public static final int USED_HEAP_MB_FIELD_NUMBER = 3; private int usedHeapMB_; /** - * optional uint32 used_heap_MB = 3; - * *
      ** the amount of used heap, in MB. 
      * 
+ * + * optional uint32 used_heap_MB = 3; */ public boolean hasUsedHeapMB() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * optional uint32 used_heap_MB = 3; - * *
      ** the amount of used heap, in MB. 
      * 
+ * + * optional uint32 used_heap_MB = 3; */ public int getUsedHeapMB() { return usedHeapMB_; } - // optional uint32 max_heap_MB = 4; public static final int MAX_HEAP_MB_FIELD_NUMBER = 4; private int maxHeapMB_; /** - * optional uint32 max_heap_MB = 4; - * *
      ** the maximum allowable size of the heap, in MB. 
      * 
+ * + * optional uint32 max_heap_MB = 4; */ public boolean hasMaxHeapMB() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** - * optional uint32 max_heap_MB = 4; - * *
      ** the maximum allowable size of the heap, in MB. 
      * 
+ * + * optional uint32 max_heap_MB = 4; */ public int getMaxHeapMB() { return maxHeapMB_; } - // repeated .hbase.pb.RegionLoad region_loads = 5; public static final int REGION_LOADS_FIELD_NUMBER = 5; private java.util.List regionLoads_; /** - * repeated .hbase.pb.RegionLoad region_loads = 5; - * *
      ** Information on the load of individual regions. 
      * 
+ * + * repeated .hbase.pb.RegionLoad region_loads = 5; */ public java.util.List getRegionLoadsList() { return regionLoads_; } /** - * repeated .hbase.pb.RegionLoad region_loads = 5; - * *
      ** Information on the load of individual regions. 
      * 
+ * + * repeated .hbase.pb.RegionLoad region_loads = 5; */ public java.util.List getRegionLoadsOrBuilderList() { return regionLoads_; } /** - * repeated .hbase.pb.RegionLoad region_loads = 5; - * *
      ** Information on the load of individual regions. 
      * 
+ * + * repeated .hbase.pb.RegionLoad region_loads = 5; */ public int getRegionLoadsCount() { return regionLoads_.size(); } /** - * repeated .hbase.pb.RegionLoad region_loads = 5; - * *
      ** Information on the load of individual regions. 
      * 
+ * + * repeated .hbase.pb.RegionLoad region_loads = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad getRegionLoads(int index) { return regionLoads_.get(index); } /** - * repeated .hbase.pb.RegionLoad region_loads = 5; - * *
      ** Information on the load of individual regions. 
      * 
+ * + * repeated .hbase.pb.RegionLoad region_loads = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoadOrBuilder getRegionLoadsOrBuilder( int index) { return regionLoads_.get(index); } - // repeated .hbase.pb.Coprocessor coprocessors = 6; public static final int COPROCESSORS_FIELD_NUMBER = 6; private java.util.List coprocessors_; /** - * repeated .hbase.pb.Coprocessor coprocessors = 6; - * *
      **
      * Regionserver-level coprocessors, e.g., WALObserver implementations.
      * Region-level coprocessors, on the other hand, are stored inside RegionLoad
      * objects.
      * 
+ * + * repeated .hbase.pb.Coprocessor coprocessors = 6; */ public java.util.List getCoprocessorsList() { return coprocessors_; } /** - * repeated .hbase.pb.Coprocessor coprocessors = 6; - * *
      **
      * Regionserver-level coprocessors, e.g., WALObserver implementations.
      * Region-level coprocessors, on the other hand, are stored inside RegionLoad
      * objects.
      * 
+ * + * repeated .hbase.pb.Coprocessor coprocessors = 6; */ public java.util.List getCoprocessorsOrBuilderList() { return coprocessors_; } /** - * repeated .hbase.pb.Coprocessor coprocessors = 6; - * *
      **
      * Regionserver-level coprocessors, e.g., WALObserver implementations.
      * Region-level coprocessors, on the other hand, are stored inside RegionLoad
      * objects.
      * 
+ * + * repeated .hbase.pb.Coprocessor coprocessors = 6; */ public int getCoprocessorsCount() { return coprocessors_.size(); } /** - * repeated .hbase.pb.Coprocessor coprocessors = 6; - * *
      **
      * Regionserver-level coprocessors, e.g., WALObserver implementations.
      * Region-level coprocessors, on the other hand, are stored inside RegionLoad
      * objects.
      * 
+ * + * repeated .hbase.pb.Coprocessor coprocessors = 6; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor getCoprocessors(int index) { return coprocessors_.get(index); } /** - * repeated .hbase.pb.Coprocessor coprocessors = 6; - * *
      **
      * Regionserver-level coprocessors, e.g., WALObserver implementations.
      * Region-level coprocessors, on the other hand, are stored inside RegionLoad
      * objects.
      * 
+ * + * repeated .hbase.pb.Coprocessor coprocessors = 6; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getCoprocessorsOrBuilder( int index) { return coprocessors_.get(index); } - // optional uint64 report_start_time = 7; public static final int REPORT_START_TIME_FIELD_NUMBER = 7; private long reportStartTime_; /** - * optional uint64 report_start_time = 7; - * *
      **
      * Time when incremental (non-total) counts began being calculated (e.g. number_of_requests)
      * time is measured as the difference, measured in milliseconds, between the current time
      * and midnight, January 1, 1970 UTC.
      * 
+ * + * optional uint64 report_start_time = 7; */ public boolean hasReportStartTime() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** - * optional uint64 report_start_time = 7; - * *
      **
      * Time when incremental (non-total) counts began being calculated (e.g. number_of_requests)
      * time is measured as the difference, measured in milliseconds, between the current time
      * and midnight, January 1, 1970 UTC.
      * 
+ * + * optional uint64 report_start_time = 7; */ public long getReportStartTime() { return reportStartTime_; } - // optional uint64 report_end_time = 8; public static final int REPORT_END_TIME_FIELD_NUMBER = 8; private long reportEndTime_; /** - * optional uint64 report_end_time = 8; - * *
      **
      * Time when report was generated.
      * time is measured as the difference, measured in milliseconds, between the current time
      * and midnight, January 1, 1970 UTC.
      * 
+ * + * optional uint64 report_end_time = 8; */ public boolean hasReportEndTime() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** - * optional uint64 report_end_time = 8; - * *
      **
      * Time when report was generated.
      * time is measured as the difference, measured in milliseconds, between the current time
      * and midnight, January 1, 1970 UTC.
      * 
+ * + * optional uint64 report_end_time = 8; */ public long getReportEndTime() { return reportEndTime_; } - // optional uint32 info_server_port = 9; public static final int INFO_SERVER_PORT_FIELD_NUMBER = 9; private int infoServerPort_; /** - * optional uint32 info_server_port = 9; - * *
      **
      * The port number that this region server is hosing an info server on.
      * 
+ * + * optional uint32 info_server_port = 9; */ public boolean hasInfoServerPort() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** - * optional uint32 info_server_port = 9; - * *
      **
      * The port number that this region server is hosing an info server on.
      * 
+ * + * optional uint32 info_server_port = 9; */ public int getInfoServerPort() { return infoServerPort_; } - // repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; public static final int REPLLOADSOURCE_FIELD_NUMBER = 10; private java.util.List replLoadSource_; /** - * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; - * *
      **
      * The replicationLoadSource for the replication Source status of this region server.
      * 
+ * + * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; */ public java.util.List getReplLoadSourceList() { return replLoadSource_; } /** - * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; - * *
      **
      * The replicationLoadSource for the replication Source status of this region server.
      * 
+ * + * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; */ public java.util.List getReplLoadSourceOrBuilderList() { return replLoadSource_; } /** - * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; - * *
      **
      * The replicationLoadSource for the replication Source status of this region server.
      * 
+ * + * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; */ public int getReplLoadSourceCount() { return replLoadSource_.size(); } /** - * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; - * *
      **
      * The replicationLoadSource for the replication Source status of this region server.
      * 
+ * + * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource getReplLoadSource(int index) { return replLoadSource_.get(index); } /** - * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; - * *
      **
      * The replicationLoadSource for the replication Source status of this region server.
      * 
+ * + * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSourceOrBuilder getReplLoadSourceOrBuilder( int index) { return replLoadSource_.get(index); } - // optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; public static final int REPLLOADSINK_FIELD_NUMBER = 11; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink replLoadSink_; /** - * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; - * *
      **
      * The replicationLoadSink for the replication Sink status of this region server.
      * 
+ * + * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; */ public boolean hasReplLoadSink() { return ((bitField0_ & 0x00000080) == 0x00000080); } /** - * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; - * *
      **
      * The replicationLoadSink for the replication Sink status of this region server.
      * 
+ * + * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink getReplLoadSink() { - return replLoadSink_; + return replLoadSink_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink.getDefaultInstance() : replLoadSink_; } /** - * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; - * *
      **
      * The replicationLoadSink for the replication Sink status of this region server.
      * 
+ * + * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSinkOrBuilder getReplLoadSinkOrBuilder() { - return replLoadSink_; + return replLoadSink_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink.getDefaultInstance() : replLoadSink_; } - private void initFields() { - numberOfRequests_ = 0L; - totalNumberOfRequests_ = 0L; - usedHeapMB_ = 0; - maxHeapMB_ = 0; - regionLoads_ = java.util.Collections.emptyList(); - coprocessors_ = java.util.Collections.emptyList(); - reportStartTime_ = 0L; - reportEndTime_ = 0L; - infoServerPort_ = 0; - replLoadSource_ = java.util.Collections.emptyList(); - replLoadSink_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; for (int i = 0; i < getRegionLoadsCount(); i++) { if (!getRegionLoads(i).isInitialized()) { @@ -8842,7 +8961,6 @@ public final class ClusterStatusProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, numberOfRequests_); } @@ -8874,14 +8992,13 @@ public final class ClusterStatusProtos { output.writeMessage(10, replLoadSource_.get(i)); } if (((bitField0_ & 0x00000080) == 0x00000080)) { - output.writeMessage(11, replLoadSink_); + output.writeMessage(11, getReplLoadSink()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; 
size = 0; @@ -8927,21 +9044,15 @@ public final class ClusterStatusProtos { } if (((bitField0_ & 0x00000080) == 0x00000080)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(11, replLoadSink_); + .computeMessageSize(11, getReplLoadSink()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -8998,12 +9109,10 @@ public final class ClusterStatusProtos { result = result && getReplLoadSink() .equals(other.getReplLoadSink()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -9013,11 +9122,13 @@ public final class ClusterStatusProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasNumberOfRequests()) { hash = (37 * hash) + NUMBER_OF_REQUESTS_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNumberOfRequests()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNumberOfRequests()); } if (hasTotalNumberOfRequests()) { hash = (37 * hash) + TOTAL_NUMBER_OF_REQUESTS_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getTotalNumberOfRequests()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getTotalNumberOfRequests()); } if (hasUsedHeapMB()) { hash = (37 * hash) + USED_HEAP_MB_FIELD_NUMBER; @@ -9037,11 +9148,13 @@ public final class ClusterStatusProtos { } if (hasReportStartTime()) { hash = (37 * hash) + REPORT_START_TIME_FIELD_NUMBER; - hash = (53 * hash) + 
hashLong(getReportStartTime()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getReportStartTime()); } if (hasReportEndTime()) { hash = (37 * hash) + REPORT_END_TIME_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getReportEndTime()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getReportEndTime()); } if (hasInfoServerPort()) { hash = (37 * hash) + INFO_SERVER_PORT_FIELD_NUMBER; @@ -9055,7 +9168,7 @@ public final class ClusterStatusProtos { hash = (37 * hash) + REPLLOADSINK_FIELD_NUMBER; hash = (53 * hash) + getReplLoadSink().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -9083,46 +9196,57 @@ public final class ClusterStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -9130,14 +9254,15 @@ public final class ClusterStatusProtos { * Protobuf type {@code hbase.pb.ServerLoad} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoadOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ServerLoad) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoadOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_ServerLoad_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_ServerLoad_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -9150,22 +9275,19 @@ public final class ClusterStatusProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionLoadsFieldBuilder(); getCoprocessorsFieldBuilder(); getReplLoadSourceFieldBuilder(); 
getReplLoadSinkFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); numberOfRequests_ = 0L; @@ -9201,7 +9323,7 @@ public final class ClusterStatusProtos { replLoadSourceBuilder_.clear(); } if (replLoadSinkBuilder_ == null) { - replLoadSink_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink.getDefaultInstance(); + replLoadSink_ = null; } else { replLoadSinkBuilder_.clear(); } @@ -9209,10 +9331,6 @@ public final class ClusterStatusProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_ServerLoad_descriptor; @@ -9302,6 +9420,32 @@ public final class ClusterStatusProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad) { return 
mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad)other); @@ -9344,7 +9488,7 @@ public final class ClusterStatusProtos { regionLoads_ = other.regionLoads_; bitField0_ = (bitField0_ & ~0x00000010); regionLoadsBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getRegionLoadsFieldBuilder() : null; } else { regionLoadsBuilder_.addAllMessages(other.regionLoads_); @@ -9370,7 +9514,7 @@ public final class ClusterStatusProtos { coprocessors_ = other.coprocessors_; bitField0_ = (bitField0_ & ~0x00000020); coprocessorsBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getCoprocessorsFieldBuilder() : null; } else { coprocessorsBuilder_.addAllMessages(other.coprocessors_); @@ -9405,7 +9549,7 @@ public final class ClusterStatusProtos { replLoadSource_ = other.replLoadSource_; bitField0_ = (bitField0_ & ~0x00000200); replLoadSourceBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getReplLoadSourceFieldBuilder() : null; } else { replLoadSourceBuilder_.addAllMessages(other.replLoadSource_); @@ -9415,32 +9559,29 @@ public final class ClusterStatusProtos { if (other.hasReplLoadSink()) { mergeReplLoadSink(other.getReplLoadSink()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getRegionLoadsCount(); i++) { if (!getRegionLoads(i).isInitialized()) { - return false; } } for (int i = 0; i < getCoprocessorsCount(); i++) { if (!getCoprocessors(i).isInitialized()) { - return false; } } for (int i = 0; i < getReplLoadSourceCount(); i++) { if (!getReplLoadSource(i).isInitialized()) { - return false; } } if (hasReplLoadSink()) { if (!getReplLoadSink().isInitialized()) { - return false; } } @@ -9456,7 +9597,7 @@ public final class ClusterStatusProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -9466,34 +9607,33 @@ public final class ClusterStatusProtos { } private int bitField0_; - // optional uint64 number_of_requests = 1; private long numberOfRequests_ ; /** - * optional uint64 number_of_requests = 1; - * *
        ** Number of requests since last report. 
        * 
+ * + * optional uint64 number_of_requests = 1; */ public boolean hasNumberOfRequests() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional uint64 number_of_requests = 1; - * *
        ** Number of requests since last report. 
        * 
+ * + * optional uint64 number_of_requests = 1; */ public long getNumberOfRequests() { return numberOfRequests_; } /** - * optional uint64 number_of_requests = 1; - * *
        ** Number of requests since last report. 
        * 
+ * + * optional uint64 number_of_requests = 1; */ public Builder setNumberOfRequests(long value) { bitField0_ |= 0x00000001; @@ -9502,11 +9642,11 @@ public final class ClusterStatusProtos { return this; } /** - * optional uint64 number_of_requests = 1; - * *
        ** Number of requests since last report. 
        * 
+ * + * optional uint64 number_of_requests = 1; */ public Builder clearNumberOfRequests() { bitField0_ = (bitField0_ & ~0x00000001); @@ -9515,34 +9655,33 @@ public final class ClusterStatusProtos { return this; } - // optional uint64 total_number_of_requests = 2; private long totalNumberOfRequests_ ; /** - * optional uint64 total_number_of_requests = 2; - * *
        ** Total Number of requests from the start of the region server. 
        * 
+ * + * optional uint64 total_number_of_requests = 2; */ public boolean hasTotalNumberOfRequests() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional uint64 total_number_of_requests = 2; - * *
        ** Total Number of requests from the start of the region server. 
        * 
+ * + * optional uint64 total_number_of_requests = 2; */ public long getTotalNumberOfRequests() { return totalNumberOfRequests_; } /** - * optional uint64 total_number_of_requests = 2; - * *
        ** Total Number of requests from the start of the region server. 
        * 
+ * + * optional uint64 total_number_of_requests = 2; */ public Builder setTotalNumberOfRequests(long value) { bitField0_ |= 0x00000002; @@ -9551,11 +9690,11 @@ public final class ClusterStatusProtos { return this; } /** - * optional uint64 total_number_of_requests = 2; - * *
        ** Total Number of requests from the start of the region server. 
        * 
+ * + * optional uint64 total_number_of_requests = 2; */ public Builder clearTotalNumberOfRequests() { bitField0_ = (bitField0_ & ~0x00000002); @@ -9564,34 +9703,33 @@ public final class ClusterStatusProtos { return this; } - // optional uint32 used_heap_MB = 3; private int usedHeapMB_ ; /** - * optional uint32 used_heap_MB = 3; - * *
        ** the amount of used heap, in MB. 
        * 
+ * + * optional uint32 used_heap_MB = 3; */ public boolean hasUsedHeapMB() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * optional uint32 used_heap_MB = 3; - * *
        ** the amount of used heap, in MB. 
        * 
+ * + * optional uint32 used_heap_MB = 3; */ public int getUsedHeapMB() { return usedHeapMB_; } /** - * optional uint32 used_heap_MB = 3; - * *
        ** the amount of used heap, in MB. 
        * 
+ * + * optional uint32 used_heap_MB = 3; */ public Builder setUsedHeapMB(int value) { bitField0_ |= 0x00000004; @@ -9600,11 +9738,11 @@ public final class ClusterStatusProtos { return this; } /** - * optional uint32 used_heap_MB = 3; - * *
        ** the amount of used heap, in MB. 
        * 
+ * + * optional uint32 used_heap_MB = 3; */ public Builder clearUsedHeapMB() { bitField0_ = (bitField0_ & ~0x00000004); @@ -9613,34 +9751,33 @@ public final class ClusterStatusProtos { return this; } - // optional uint32 max_heap_MB = 4; private int maxHeapMB_ ; /** - * optional uint32 max_heap_MB = 4; - * *
        ** the maximum allowable size of the heap, in MB. 
        * 
+ * + * optional uint32 max_heap_MB = 4; */ public boolean hasMaxHeapMB() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** - * optional uint32 max_heap_MB = 4; - * *
        ** the maximum allowable size of the heap, in MB. 
        * 
+ * + * optional uint32 max_heap_MB = 4; */ public int getMaxHeapMB() { return maxHeapMB_; } /** - * optional uint32 max_heap_MB = 4; - * *
        ** the maximum allowable size of the heap, in MB. 
        * 
+ * + * optional uint32 max_heap_MB = 4; */ public Builder setMaxHeapMB(int value) { bitField0_ |= 0x00000008; @@ -9649,11 +9786,11 @@ public final class ClusterStatusProtos { return this; } /** - * optional uint32 max_heap_MB = 4; - * *
        ** the maximum allowable size of the heap, in MB. 
        * 
+ * + * optional uint32 max_heap_MB = 4; */ public Builder clearMaxHeapMB() { bitField0_ = (bitField0_ & ~0x00000008); @@ -9662,7 +9799,6 @@ public final class ClusterStatusProtos { return this; } - // repeated .hbase.pb.RegionLoad region_loads = 5; private java.util.List regionLoads_ = java.util.Collections.emptyList(); private void ensureRegionLoadsIsMutable() { @@ -9672,15 +9808,15 @@ public final class ClusterStatusProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoadOrBuilder> regionLoadsBuilder_; /** - * repeated .hbase.pb.RegionLoad region_loads = 5; - * *
        ** Information on the load of individual regions. 
        * 
+ * + * repeated .hbase.pb.RegionLoad region_loads = 5; */ public java.util.List getRegionLoadsList() { if (regionLoadsBuilder_ == null) { @@ -9690,11 +9826,11 @@ public final class ClusterStatusProtos { } } /** - * repeated .hbase.pb.RegionLoad region_loads = 5; - * *
        ** Information on the load of individual regions. 
        * 
+ * + * repeated .hbase.pb.RegionLoad region_loads = 5; */ public int getRegionLoadsCount() { if (regionLoadsBuilder_ == null) { @@ -9704,11 +9840,11 @@ public final class ClusterStatusProtos { } } /** - * repeated .hbase.pb.RegionLoad region_loads = 5; - * *
        ** Information on the load of individual regions. 
        * 
+ * + * repeated .hbase.pb.RegionLoad region_loads = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad getRegionLoads(int index) { if (regionLoadsBuilder_ == null) { @@ -9718,11 +9854,11 @@ public final class ClusterStatusProtos { } } /** - * repeated .hbase.pb.RegionLoad region_loads = 5; - * *
        ** Information on the load of individual regions. 
        * 
+ * + * repeated .hbase.pb.RegionLoad region_loads = 5; */ public Builder setRegionLoads( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad value) { @@ -9739,11 +9875,11 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.RegionLoad region_loads = 5; - * *
        ** Information on the load of individual regions. 
        * 
+ * + * repeated .hbase.pb.RegionLoad region_loads = 5; */ public Builder setRegionLoads( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad.Builder builderForValue) { @@ -9757,11 +9893,11 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.RegionLoad region_loads = 5; - * *
        ** Information on the load of individual regions. 
        * 
+ * + * repeated .hbase.pb.RegionLoad region_loads = 5; */ public Builder addRegionLoads(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad value) { if (regionLoadsBuilder_ == null) { @@ -9777,11 +9913,11 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.RegionLoad region_loads = 5; - * *
        ** Information on the load of individual regions. 
        * 
+ * + * repeated .hbase.pb.RegionLoad region_loads = 5; */ public Builder addRegionLoads( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad value) { @@ -9798,11 +9934,11 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.RegionLoad region_loads = 5; - * *
        ** Information on the load of individual regions. 
        * 
+ * + * repeated .hbase.pb.RegionLoad region_loads = 5; */ public Builder addRegionLoads( org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad.Builder builderForValue) { @@ -9816,11 +9952,11 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.RegionLoad region_loads = 5; - * *
        ** Information on the load of individual regions. 
        * 
+ * + * repeated .hbase.pb.RegionLoad region_loads = 5; */ public Builder addRegionLoads( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad.Builder builderForValue) { @@ -9834,17 +9970,18 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.RegionLoad region_loads = 5; - * *
        ** Information on the load of individual regions. 
        * 
+ * + * repeated .hbase.pb.RegionLoad region_loads = 5; */ public Builder addAllRegionLoads( java.lang.Iterable values) { if (regionLoadsBuilder_ == null) { ensureRegionLoadsIsMutable(); - super.addAll(values, regionLoads_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, regionLoads_); onChanged(); } else { regionLoadsBuilder_.addAllMessages(values); @@ -9852,11 +9989,11 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.RegionLoad region_loads = 5; - * *
        ** Information on the load of individual regions. 
        * 
+ * + * repeated .hbase.pb.RegionLoad region_loads = 5; */ public Builder clearRegionLoads() { if (regionLoadsBuilder_ == null) { @@ -9869,11 +10006,11 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.RegionLoad region_loads = 5; - * *
        ** Information on the load of individual regions. 
        * 
+ * + * repeated .hbase.pb.RegionLoad region_loads = 5; */ public Builder removeRegionLoads(int index) { if (regionLoadsBuilder_ == null) { @@ -9886,22 +10023,22 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.RegionLoad region_loads = 5; - * *
        ** Information on the load of individual regions. 
        * 
+ * + * repeated .hbase.pb.RegionLoad region_loads = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad.Builder getRegionLoadsBuilder( int index) { return getRegionLoadsFieldBuilder().getBuilder(index); } /** - * repeated .hbase.pb.RegionLoad region_loads = 5; - * *
        ** Information on the load of individual regions. 
        * 
+ * + * repeated .hbase.pb.RegionLoad region_loads = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoadOrBuilder getRegionLoadsOrBuilder( int index) { @@ -9911,11 +10048,11 @@ public final class ClusterStatusProtos { } } /** - * repeated .hbase.pb.RegionLoad region_loads = 5; - * *
        ** Information on the load of individual regions. 
        * 
+ * + * repeated .hbase.pb.RegionLoad region_loads = 5; */ public java.util.List getRegionLoadsOrBuilderList() { @@ -9926,22 +10063,22 @@ public final class ClusterStatusProtos { } } /** - * repeated .hbase.pb.RegionLoad region_loads = 5; - * *
        ** Information on the load of individual regions. 
        * 
+ * + * repeated .hbase.pb.RegionLoad region_loads = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad.Builder addRegionLoadsBuilder() { return getRegionLoadsFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad.getDefaultInstance()); } /** - * repeated .hbase.pb.RegionLoad region_loads = 5; - * *
        ** Information on the load of individual regions. 
        * 
+ * + * repeated .hbase.pb.RegionLoad region_loads = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad.Builder addRegionLoadsBuilder( int index) { @@ -9949,21 +10086,21 @@ public final class ClusterStatusProtos { index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad.getDefaultInstance()); } /** - * repeated .hbase.pb.RegionLoad region_loads = 5; - * *
        ** Information on the load of individual regions. 
        * 
+ * + * repeated .hbase.pb.RegionLoad region_loads = 5; */ public java.util.List getRegionLoadsBuilderList() { return getRegionLoadsFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoadOrBuilder> getRegionLoadsFieldBuilder() { if (regionLoadsBuilder_ == null) { - regionLoadsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + regionLoadsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoadOrBuilder>( regionLoads_, ((bitField0_ & 0x00000010) == 0x00000010), @@ -9974,7 +10111,6 @@ public final class ClusterStatusProtos { return regionLoadsBuilder_; } - // repeated .hbase.pb.Coprocessor coprocessors = 6; private java.util.List coprocessors_ = java.util.Collections.emptyList(); private void ensureCoprocessorsIsMutable() { @@ -9984,18 +10120,18 @@ public final class ClusterStatusProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CoprocessorOrBuilder> coprocessorsBuilder_; /** - * repeated .hbase.pb.Coprocessor coprocessors = 6; - * *
        **
        * Regionserver-level coprocessors, e.g., WALObserver implementations.
        * Region-level coprocessors, on the other hand, are stored inside RegionLoad
        * objects.
        * 
+ * + * repeated .hbase.pb.Coprocessor coprocessors = 6; */ public java.util.List getCoprocessorsList() { if (coprocessorsBuilder_ == null) { @@ -10005,14 +10141,14 @@ public final class ClusterStatusProtos { } } /** - * repeated .hbase.pb.Coprocessor coprocessors = 6; - * *
        **
        * Regionserver-level coprocessors, e.g., WALObserver implementations.
        * Region-level coprocessors, on the other hand, are stored inside RegionLoad
        * objects.
        * 
+ * + * repeated .hbase.pb.Coprocessor coprocessors = 6; */ public int getCoprocessorsCount() { if (coprocessorsBuilder_ == null) { @@ -10022,14 +10158,14 @@ public final class ClusterStatusProtos { } } /** - * repeated .hbase.pb.Coprocessor coprocessors = 6; - * *
        **
        * Regionserver-level coprocessors, e.g., WALObserver implementations.
        * Region-level coprocessors, on the other hand, are stored inside RegionLoad
        * objects.
        * 
+ * + * repeated .hbase.pb.Coprocessor coprocessors = 6; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor getCoprocessors(int index) { if (coprocessorsBuilder_ == null) { @@ -10039,14 +10175,14 @@ public final class ClusterStatusProtos { } } /** - * repeated .hbase.pb.Coprocessor coprocessors = 6; - * *
        **
        * Regionserver-level coprocessors, e.g., WALObserver implementations.
        * Region-level coprocessors, on the other hand, are stored inside RegionLoad
        * objects.
        * 
+ * + * repeated .hbase.pb.Coprocessor coprocessors = 6; */ public Builder setCoprocessors( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor value) { @@ -10063,14 +10199,14 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.Coprocessor coprocessors = 6; - * *
        **
        * Regionserver-level coprocessors, e.g., WALObserver implementations.
        * Region-level coprocessors, on the other hand, are stored inside RegionLoad
        * objects.
        * 
+ * + * repeated .hbase.pb.Coprocessor coprocessors = 6; */ public Builder setCoprocessors( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) { @@ -10084,14 +10220,14 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.Coprocessor coprocessors = 6; - * *
        **
        * Regionserver-level coprocessors, e.g., WALObserver implementations.
        * Region-level coprocessors, on the other hand, are stored inside RegionLoad
        * objects.
        * 
+ * + * repeated .hbase.pb.Coprocessor coprocessors = 6; */ public Builder addCoprocessors(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor value) { if (coprocessorsBuilder_ == null) { @@ -10107,14 +10243,14 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.Coprocessor coprocessors = 6; - * *
        **
        * Regionserver-level coprocessors, e.g., WALObserver implementations.
        * Region-level coprocessors, on the other hand, are stored inside RegionLoad
        * objects.
        * 
+ * + * repeated .hbase.pb.Coprocessor coprocessors = 6; */ public Builder addCoprocessors( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor value) { @@ -10131,14 +10267,14 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.Coprocessor coprocessors = 6; - * *
        **
        * Regionserver-level coprocessors, e.g., WALObserver implementations.
        * Region-level coprocessors, on the other hand, are stored inside RegionLoad
        * objects.
        * 
+ * + * repeated .hbase.pb.Coprocessor coprocessors = 6; */ public Builder addCoprocessors( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) { @@ -10152,14 +10288,14 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.Coprocessor coprocessors = 6; - * *
        **
        * Regionserver-level coprocessors, e.g., WALObserver implementations.
        * Region-level coprocessors, on the other hand, are stored inside RegionLoad
        * objects.
        * 
+ * + * repeated .hbase.pb.Coprocessor coprocessors = 6; */ public Builder addCoprocessors( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) { @@ -10173,20 +10309,21 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.Coprocessor coprocessors = 6; - * *
        **
        * Regionserver-level coprocessors, e.g., WALObserver implementations.
        * Region-level coprocessors, on the other hand, are stored inside RegionLoad
        * objects.
        * 
+ * + * repeated .hbase.pb.Coprocessor coprocessors = 6; */ public Builder addAllCoprocessors( java.lang.Iterable values) { if (coprocessorsBuilder_ == null) { ensureCoprocessorsIsMutable(); - super.addAll(values, coprocessors_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, coprocessors_); onChanged(); } else { coprocessorsBuilder_.addAllMessages(values); @@ -10194,14 +10331,14 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.Coprocessor coprocessors = 6; - * *
        **
        * Regionserver-level coprocessors, e.g., WALObserver implementations.
        * Region-level coprocessors, on the other hand, are stored inside RegionLoad
        * objects.
        * 
+ * + * repeated .hbase.pb.Coprocessor coprocessors = 6; */ public Builder clearCoprocessors() { if (coprocessorsBuilder_ == null) { @@ -10214,14 +10351,14 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.Coprocessor coprocessors = 6; - * *
        **
        * Regionserver-level coprocessors, e.g., WALObserver implementations.
        * Region-level coprocessors, on the other hand, are stored inside RegionLoad
        * objects.
        * 
+ * + * repeated .hbase.pb.Coprocessor coprocessors = 6; */ public Builder removeCoprocessors(int index) { if (coprocessorsBuilder_ == null) { @@ -10234,28 +10371,28 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.Coprocessor coprocessors = 6; - * *
        **
        * Regionserver-level coprocessors, e.g., WALObserver implementations.
        * Region-level coprocessors, on the other hand, are stored inside RegionLoad
        * objects.
        * 
+ * + * repeated .hbase.pb.Coprocessor coprocessors = 6; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor.Builder getCoprocessorsBuilder( int index) { return getCoprocessorsFieldBuilder().getBuilder(index); } /** - * repeated .hbase.pb.Coprocessor coprocessors = 6; - * *
        **
        * Regionserver-level coprocessors, e.g., WALObserver implementations.
        * Region-level coprocessors, on the other hand, are stored inside RegionLoad
        * objects.
        * 
+ * + * repeated .hbase.pb.Coprocessor coprocessors = 6; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getCoprocessorsOrBuilder( int index) { @@ -10265,14 +10402,14 @@ public final class ClusterStatusProtos { } } /** - * repeated .hbase.pb.Coprocessor coprocessors = 6; - * *
        **
        * Regionserver-level coprocessors, e.g., WALObserver implementations.
        * Region-level coprocessors, on the other hand, are stored inside RegionLoad
        * objects.
        * 
+ * + * repeated .hbase.pb.Coprocessor coprocessors = 6; */ public java.util.List getCoprocessorsOrBuilderList() { @@ -10283,28 +10420,28 @@ public final class ClusterStatusProtos { } } /** - * repeated .hbase.pb.Coprocessor coprocessors = 6; - * *
        **
        * Regionserver-level coprocessors, e.g., WALObserver implementations.
        * Region-level coprocessors, on the other hand, are stored inside RegionLoad
        * objects.
        * 
+ * + * repeated .hbase.pb.Coprocessor coprocessors = 6; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor.Builder addCoprocessorsBuilder() { return getCoprocessorsFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance()); } /** - * repeated .hbase.pb.Coprocessor coprocessors = 6; - * *
        **
        * Regionserver-level coprocessors, e.g., WALObserver implementations.
        * Region-level coprocessors, on the other hand, are stored inside RegionLoad
        * objects.
        * 
+ * + * repeated .hbase.pb.Coprocessor coprocessors = 6; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor.Builder addCoprocessorsBuilder( int index) { @@ -10312,24 +10449,24 @@ public final class ClusterStatusProtos { index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance()); } /** - * repeated .hbase.pb.Coprocessor coprocessors = 6; - * *
        **
        * Regionserver-level coprocessors, e.g., WALObserver implementations.
        * Region-level coprocessors, on the other hand, are stored inside RegionLoad
        * objects.
        * 
+ * + * repeated .hbase.pb.Coprocessor coprocessors = 6; */ public java.util.List getCoprocessorsBuilderList() { return getCoprocessorsFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CoprocessorOrBuilder> getCoprocessorsFieldBuilder() { if (coprocessorsBuilder_ == null) { - coprocessorsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + coprocessorsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CoprocessorOrBuilder>( coprocessors_, ((bitField0_ & 0x00000020) == 0x00000020), @@ -10340,43 +10477,42 @@ public final class ClusterStatusProtos { return coprocessorsBuilder_; } - // optional uint64 report_start_time = 7; private long reportStartTime_ ; /** - * optional uint64 report_start_time = 7; - * *
        **
        * Time when incremental (non-total) counts began being calculated (e.g. number_of_requests)
        * time is measured as the difference, measured in milliseconds, between the current time
        * and midnight, January 1, 1970 UTC.
        * 
+ * + * optional uint64 report_start_time = 7; */ public boolean hasReportStartTime() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** - * optional uint64 report_start_time = 7; - * *
        **
        * Time when incremental (non-total) counts began being calculated (e.g. number_of_requests)
        * time is measured as the difference, measured in milliseconds, between the current time
        * and midnight, January 1, 1970 UTC.
        * 
+ * + * optional uint64 report_start_time = 7; */ public long getReportStartTime() { return reportStartTime_; } /** - * optional uint64 report_start_time = 7; - * *
        **
        * Time when incremental (non-total) counts began being calculated (e.g. number_of_requests)
        * time is measured as the difference, measured in milliseconds, between the current time
        * and midnight, January 1, 1970 UTC.
        * 
+ * + * optional uint64 report_start_time = 7; */ public Builder setReportStartTime(long value) { bitField0_ |= 0x00000040; @@ -10385,14 +10521,14 @@ public final class ClusterStatusProtos { return this; } /** - * optional uint64 report_start_time = 7; - * *
        **
        * Time when incremental (non-total) counts began being calculated (e.g. number_of_requests)
        * time is measured as the difference, measured in milliseconds, between the current time
        * and midnight, January 1, 1970 UTC.
        * 
+ * + * optional uint64 report_start_time = 7; */ public Builder clearReportStartTime() { bitField0_ = (bitField0_ & ~0x00000040); @@ -10401,43 +10537,42 @@ public final class ClusterStatusProtos { return this; } - // optional uint64 report_end_time = 8; private long reportEndTime_ ; /** - * optional uint64 report_end_time = 8; - * *
        **
        * Time when report was generated.
        * time is measured as the difference, measured in milliseconds, between the current time
        * and midnight, January 1, 1970 UTC.
        * 
+ * + * optional uint64 report_end_time = 8; */ public boolean hasReportEndTime() { return ((bitField0_ & 0x00000080) == 0x00000080); } /** - * optional uint64 report_end_time = 8; - * *
        **
        * Time when report was generated.
        * time is measured as the difference, measured in milliseconds, between the current time
        * and midnight, January 1, 1970 UTC.
        * 
+ * + * optional uint64 report_end_time = 8; */ public long getReportEndTime() { return reportEndTime_; } /** - * optional uint64 report_end_time = 8; - * *
        **
        * Time when report was generated.
        * time is measured as the difference, measured in milliseconds, between the current time
        * and midnight, January 1, 1970 UTC.
        * 
+ * + * optional uint64 report_end_time = 8; */ public Builder setReportEndTime(long value) { bitField0_ |= 0x00000080; @@ -10446,14 +10581,14 @@ public final class ClusterStatusProtos { return this; } /** - * optional uint64 report_end_time = 8; - * *
        **
        * Time when report was generated.
        * time is measured as the difference, measured in milliseconds, between the current time
        * and midnight, January 1, 1970 UTC.
        * 
+ * + * optional uint64 report_end_time = 8; */ public Builder clearReportEndTime() { bitField0_ = (bitField0_ & ~0x00000080); @@ -10462,37 +10597,36 @@ public final class ClusterStatusProtos { return this; } - // optional uint32 info_server_port = 9; private int infoServerPort_ ; /** - * optional uint32 info_server_port = 9; - * *
        **
        * The port number that this region server is hosting an info server on.
        * 
+ * + * optional uint32 info_server_port = 9; */ public boolean hasInfoServerPort() { return ((bitField0_ & 0x00000100) == 0x00000100); } /** - * optional uint32 info_server_port = 9; - * *
        **
        * The port number that this region server is hosting an info server on.
        * 
+ * + * optional uint32 info_server_port = 9; */ public int getInfoServerPort() { return infoServerPort_; } /** - * optional uint32 info_server_port = 9; - * *
        **
        * The port number that this region server is hosting an info server on.
        * 
+ * + * optional uint32 info_server_port = 9; */ public Builder setInfoServerPort(int value) { bitField0_ |= 0x00000100; @@ -10501,12 +10635,12 @@ public final class ClusterStatusProtos { return this; } /** - * optional uint32 info_server_port = 9; - * *
        **
        * The port number that this region server is hosting an info server on.
        * 
+ * + * optional uint32 info_server_port = 9; */ public Builder clearInfoServerPort() { bitField0_ = (bitField0_ & ~0x00000100); @@ -10515,7 +10649,6 @@ public final class ClusterStatusProtos { return this; } - // repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; private java.util.List replLoadSource_ = java.util.Collections.emptyList(); private void ensureReplLoadSourceIsMutable() { @@ -10525,16 +10658,16 @@ public final class ClusterStatusProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSourceOrBuilder> replLoadSourceBuilder_; /** - * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; - * *
        **
        * The replicationLoadSource for the replication Source status of this region server.
        * 
+ * + * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; */ public java.util.List getReplLoadSourceList() { if (replLoadSourceBuilder_ == null) { @@ -10544,12 +10677,12 @@ public final class ClusterStatusProtos { } } /** - * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; - * *
        **
        * The replicationLoadSource for the replication Source status of this region server.
        * 
+ * + * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; */ public int getReplLoadSourceCount() { if (replLoadSourceBuilder_ == null) { @@ -10559,12 +10692,12 @@ public final class ClusterStatusProtos { } } /** - * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; - * *
        **
        * The replicationLoadSource for the replication Source status of this region server.
        * 
+ * + * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource getReplLoadSource(int index) { if (replLoadSourceBuilder_ == null) { @@ -10574,12 +10707,12 @@ public final class ClusterStatusProtos { } } /** - * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; - * *
        **
        * The replicationLoadSource for the replication Source status of this region server.
        * 
+ * + * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; */ public Builder setReplLoadSource( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource value) { @@ -10596,12 +10729,12 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; - * *
        **
        * The replicationLoadSource for the replication Source status of this region server.
        * 
+ * + * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; */ public Builder setReplLoadSource( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource.Builder builderForValue) { @@ -10615,12 +10748,12 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; - * *
        **
        * The replicationLoadSource for the replication Source status of this region server.
        * 
+ * + * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; */ public Builder addReplLoadSource(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource value) { if (replLoadSourceBuilder_ == null) { @@ -10636,12 +10769,12 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; - * *
        **
        * The replicationLoadSource for the replication Source status of this region server.
        * 
+ * + * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; */ public Builder addReplLoadSource( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource value) { @@ -10658,12 +10791,12 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; - * *
        **
        * The replicationLoadSource for the replication Source status of this region server.
        * 
+ * + * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; */ public Builder addReplLoadSource( org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource.Builder builderForValue) { @@ -10677,12 +10810,12 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; - * *
        **
        * The replicationLoadSource for the replication Source status of this region server.
        * 
+ * + * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; */ public Builder addReplLoadSource( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource.Builder builderForValue) { @@ -10696,18 +10829,19 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; - * *
        **
        * The replicationLoadSource for the replication Source status of this region server.
        * 
+ * + * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; */ public Builder addAllReplLoadSource( java.lang.Iterable values) { if (replLoadSourceBuilder_ == null) { ensureReplLoadSourceIsMutable(); - super.addAll(values, replLoadSource_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, replLoadSource_); onChanged(); } else { replLoadSourceBuilder_.addAllMessages(values); @@ -10715,12 +10849,12 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; - * *
        **
        * The replicationLoadSource for the replication Source status of this region server.
        * 
+ * + * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; */ public Builder clearReplLoadSource() { if (replLoadSourceBuilder_ == null) { @@ -10733,12 +10867,12 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; - * *
        **
        * The replicationLoadSource for the replication Source status of this region server.
        * 
+ * + * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; */ public Builder removeReplLoadSource(int index) { if (replLoadSourceBuilder_ == null) { @@ -10751,24 +10885,24 @@ public final class ClusterStatusProtos { return this; } /** - * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; - * *
        **
        * The replicationLoadSource for the replication Source status of this region server.
        * 
+ * + * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource.Builder getReplLoadSourceBuilder( int index) { return getReplLoadSourceFieldBuilder().getBuilder(index); } /** - * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; - * *
        **
        * The replicationLoadSource for the replication Source status of this region server.
        * 
+ * + * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSourceOrBuilder getReplLoadSourceOrBuilder( int index) { @@ -10778,12 +10912,12 @@ public final class ClusterStatusProtos { } } /** - * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; - * *
        **
        * The replicationLoadSource for the replication Source status of this region server.
        * 
+ * + * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; */ public java.util.List getReplLoadSourceOrBuilderList() { @@ -10794,24 +10928,24 @@ public final class ClusterStatusProtos { } } /** - * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; - * *
        **
        * The replicationLoadSource for the replication Source status of this region server.
        * 
+ * + * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource.Builder addReplLoadSourceBuilder() { return getReplLoadSourceFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource.getDefaultInstance()); } /** - * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; - * *
        **
        * The replicationLoadSource for the replication Source status of this region server.
        * 
+ * + * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource.Builder addReplLoadSourceBuilder( int index) { @@ -10819,22 +10953,22 @@ public final class ClusterStatusProtos { index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource.getDefaultInstance()); } /** - * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; - * *
        **
        * The replicationLoadSource for the replication Source status of this region server.
        * 
+ * + * repeated .hbase.pb.ReplicationLoadSource replLoadSource = 10; */ public java.util.List getReplLoadSourceBuilderList() { return getReplLoadSourceFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSourceOrBuilder> getReplLoadSourceFieldBuilder() { if (replLoadSourceBuilder_ == null) { - replLoadSourceBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + replLoadSourceBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSourceOrBuilder>( replLoadSource_, ((bitField0_ & 0x00000200) == 0x00000200), @@ -10845,43 +10979,42 @@ public final class ClusterStatusProtos { return replLoadSourceBuilder_; } - // optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink replLoadSink_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink replLoadSink_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSinkOrBuilder> replLoadSinkBuilder_; /** - * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; - * *
        **
        * The replicationLoadSink for the replication Sink status of this region server.
        * 
+ * + * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; */ public boolean hasReplLoadSink() { return ((bitField0_ & 0x00000400) == 0x00000400); } /** - * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; - * *
        **
        * The replicationLoadSink for the replication Sink status of this region server.
        * 
+ * + * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink getReplLoadSink() { if (replLoadSinkBuilder_ == null) { - return replLoadSink_; + return replLoadSink_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink.getDefaultInstance() : replLoadSink_; } else { return replLoadSinkBuilder_.getMessage(); } } /** - * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; - * *
        **
        * The replicationLoadSink for the replication Sink status of this region server.
        * 
+ * + * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; */ public Builder setReplLoadSink(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink value) { if (replLoadSinkBuilder_ == null) { @@ -10897,12 +11030,12 @@ public final class ClusterStatusProtos { return this; } /** - * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; - * *
        **
        * The replicationLoadSink for the replication Sink status of this region server.
        * 
+ * + * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; */ public Builder setReplLoadSink( org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink.Builder builderForValue) { @@ -10916,16 +11049,17 @@ public final class ClusterStatusProtos { return this; } /** - * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; - * *
        **
        * The replicationLoadSink for the replication Sink status of this region server.
        * 
+ * + * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; */ public Builder mergeReplLoadSink(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink value) { if (replLoadSinkBuilder_ == null) { if (((bitField0_ & 0x00000400) == 0x00000400) && + replLoadSink_ != null && replLoadSink_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink.getDefaultInstance()) { replLoadSink_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink.newBuilder(replLoadSink_).mergeFrom(value).buildPartial(); @@ -10940,16 +11074,16 @@ public final class ClusterStatusProtos { return this; } /** - * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; - * *
        **
        * The replicationLoadSink for the replication Sink status of this region server.
        * 
+ * + * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; */ public Builder clearReplLoadSink() { if (replLoadSinkBuilder_ == null) { - replLoadSink_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink.getDefaultInstance(); + replLoadSink_ = null; onChanged(); } else { replLoadSinkBuilder_.clear(); @@ -10958,12 +11092,12 @@ public final class ClusterStatusProtos { return this; } /** - * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; - * *
        **
        * The replicationLoadSink for the replication Sink status of this region server.
        * 
+ * + * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink.Builder getReplLoadSinkBuilder() { bitField0_ |= 0x00000400; @@ -10971,57 +11105,95 @@ public final class ClusterStatusProtos { return getReplLoadSinkFieldBuilder().getBuilder(); } /** - * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; - * *
        **
        * The replicationLoadSink for the replication Sink status of this region server.
        * 
+ * + * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSinkOrBuilder getReplLoadSinkOrBuilder() { if (replLoadSinkBuilder_ != null) { return replLoadSinkBuilder_.getMessageOrBuilder(); } else { - return replLoadSink_; + return replLoadSink_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink.getDefaultInstance() : replLoadSink_; } } /** - * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; - * *
        **
        * The replicationLoadSink for the replication Sink status of this region server.
        * 
+ * + * optional .hbase.pb.ReplicationLoadSink replLoadSink = 11; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSinkOrBuilder> getReplLoadSinkFieldBuilder() { if (replLoadSinkBuilder_ == null) { - replLoadSinkBuilder_ = new com.google.protobuf.SingleFieldBuilder< + replLoadSinkBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSinkOrBuilder>( - replLoadSink_, + getReplLoadSink(), getParentForChildren(), isClean()); replLoadSink_ = null; } return replLoadSinkBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ServerLoad) } + // @@protoc_insertion_point(class_scope:hbase.pb.ServerLoad) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad DEFAULT_INSTANCE; static { - defaultInstance = new ServerLoad(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad getDefaultInstance() { + return DEFAULT_INSTANCE; 
+ } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ServerLoad parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ServerLoad(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ServerLoad) } - public interface LiveServerInfoOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface LiveServerInfoOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.LiveServerInfo) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.ServerName server = 1; /** * required .hbase.pb.ServerName server = 1; */ @@ -11035,7 +11207,6 @@ public final class ClusterStatusProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder(); - // required .hbase.pb.ServerLoad server_load = 2; /** * required .hbase.pb.ServerLoad server_load = 2; */ @@ -11052,36 +11223,27 @@ public final class ClusterStatusProtos { /** * Protobuf type {@code hbase.pb.LiveServerInfo} */ - public static final class LiveServerInfo extends - com.google.protobuf.GeneratedMessage - implements LiveServerInfoOrBuilder { + public static final class LiveServerInfo extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.LiveServerInfo) + LiveServerInfoOrBuilder { // Use LiveServerInfo.newBuilder() to construct. 
- private LiveServerInfo(com.google.protobuf.GeneratedMessage.Builder builder) { + private LiveServerInfo(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private LiveServerInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final LiveServerInfo defaultInstance; - public static LiveServerInfo getDefaultInstance() { - return defaultInstance; } - - public LiveServerInfo getDefaultInstanceForType() { - return defaultInstance; + private LiveServerInfo() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private LiveServerInfo( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -11132,7 +11294,7 @@ public final class ClusterStatusProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -11143,30 +11305,14 @@ public final class ClusterStatusProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_LiveServerInfo_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_LiveServerInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfo.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public LiveServerInfo parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new LiveServerInfo(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.ServerName server = 1; public static final int SERVER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName server_; /** @@ -11179,16 +11325,15 @@ public final class ClusterStatusProtos { * required .hbase.pb.ServerName server = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServer() { - return server_; + return server_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } /** * required .hbase.pb.ServerName server = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { - return server_; + return server_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } - // required .hbase.pb.ServerLoad server_load = 2; public static final int SERVER_LOAD_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad serverLoad_; /** @@ -11201,23 +11346,20 @@ public final class ClusterStatusProtos { * required .hbase.pb.ServerLoad server_load = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad getServerLoad() { - return serverLoad_; + return serverLoad_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.getDefaultInstance() : serverLoad_; } /** * required .hbase.pb.ServerLoad server_load = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoadOrBuilder getServerLoadOrBuilder() { - return serverLoad_; + return serverLoad_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.getDefaultInstance() : serverLoad_; } - private void initFields() { - server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - serverLoad_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasServer()) { memoizedIsInitialized = 0; @@ -11241,43 +11383,35 @@ public final class ClusterStatusProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, server_); + output.writeMessage(1, getServer()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - 
output.writeMessage(2, serverLoad_); + output.writeMessage(2, getServerLoad()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, server_); + .computeMessageSize(1, getServer()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, serverLoad_); + .computeMessageSize(2, getServerLoad()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -11298,12 +11432,10 @@ public final class ClusterStatusProtos { result = result && getServerLoad() .equals(other.getServerLoad()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -11319,7 +11451,7 @@ public final class ClusterStatusProtos { hash = (37 * hash) + SERVER_LOAD_FIELD_NUMBER; hash = (53 * hash) + getServerLoad().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -11347,46 +11479,57 @@ public final class ClusterStatusProtos { } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); 
+ return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfo prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -11394,14 +11537,15 @@ public final class ClusterStatusProtos { * Protobuf type {@code hbase.pb.LiveServerInfo} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.LiveServerInfo) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_LiveServerInfo_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_LiveServerInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -11414,30 +11558,27 @@ public final class ClusterStatusProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getServerFieldBuilder(); getServerLoadFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (serverBuilder_ == null) { - server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + server_ = null; } else { serverBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (serverLoadBuilder_ == null) { - serverLoad_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.getDefaultInstance(); + serverLoad_ = null; } else { serverLoadBuilder_.clear(); } @@ -11445,10 +11586,6 @@ public final class ClusterStatusProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_LiveServerInfo_descriptor; @@ -11491,6 +11628,32 @@ public final class ClusterStatusProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return 
(Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfo) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfo)other); @@ -11508,25 +11671,22 @@ public final class ClusterStatusProtos { if (other.hasServerLoad()) { mergeServerLoad(other.getServerLoad()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasServer()) { - return false; } if (!hasServerLoad()) { - return false; } if (!getServer().isInitialized()) { - return false; } if (!getServerLoad().isInitialized()) { - return false; } return true; @@ -11541,7 +11701,7 @@ public final class ClusterStatusProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfo) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -11551,9 +11711,8 @@ public final class ClusterStatusProtos { } private int bitField0_; - // required .hbase.pb.ServerName server = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName server_ = 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName server_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_; /** * required .hbase.pb.ServerName server = 1; @@ -11566,7 +11725,7 @@ public final class ClusterStatusProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServer() { if (serverBuilder_ == null) { - return server_; + return server_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } else { return serverBuilder_.getMessage(); } @@ -11607,6 +11766,7 @@ public final class ClusterStatusProtos { public Builder mergeServer(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (serverBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + server_ != null && server_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.newBuilder(server_).mergeFrom(value).buildPartial(); @@ -11625,7 +11785,7 @@ public final class ClusterStatusProtos { */ public Builder clearServer() { if (serverBuilder_ == null) { - server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + server_ = null; onChanged(); } else { serverBuilder_.clear(); @@ -11648,19 +11808,20 @@ public final class ClusterStatusProtos { if (serverBuilder_ != null) { return serverBuilder_.getMessageOrBuilder(); } else { - return server_; + return server_ == null 
? + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } } /** * required .hbase.pb.ServerName server = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getServerFieldBuilder() { if (serverBuilder_ == null) { - serverBuilder_ = new com.google.protobuf.SingleFieldBuilder< + serverBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( - server_, + getServer(), getParentForChildren(), isClean()); server_ = null; @@ -11668,9 +11829,8 @@ public final class ClusterStatusProtos { return serverBuilder_; } - // required .hbase.pb.ServerLoad server_load = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad serverLoad_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad serverLoad_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoadOrBuilder> serverLoadBuilder_; /** * required .hbase.pb.ServerLoad server_load = 2; @@ -11683,7 +11843,7 @@ public final class ClusterStatusProtos { */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad getServerLoad() { if (serverLoadBuilder_ == null) { - return serverLoad_; + return serverLoad_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.getDefaultInstance() : serverLoad_; } else { return serverLoadBuilder_.getMessage(); } @@ -11724,6 +11884,7 @@ public final class ClusterStatusProtos { public Builder mergeServerLoad(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad value) { if (serverLoadBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + serverLoad_ != null && serverLoad_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.getDefaultInstance()) { serverLoad_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.newBuilder(serverLoad_).mergeFrom(value).buildPartial(); @@ -11742,7 +11903,7 @@ public final class ClusterStatusProtos { */ public Builder clearServerLoad() { if (serverLoadBuilder_ == null) { - serverLoad_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.getDefaultInstance(); + serverLoad_ = null; onChanged(); } else { serverLoadBuilder_.clear(); @@ -11765,41 +11926,79 @@ public final class ClusterStatusProtos { if (serverLoadBuilder_ != null) { return serverLoadBuilder_.getMessageOrBuilder(); } else { - return serverLoad_; + return serverLoad_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.getDefaultInstance() : serverLoad_; } } /** * required .hbase.pb.ServerLoad server_load = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoadOrBuilder> getServerLoadFieldBuilder() { if (serverLoadBuilder_ == null) { - serverLoadBuilder_ = new com.google.protobuf.SingleFieldBuilder< + serverLoadBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoadOrBuilder>( - serverLoad_, + getServerLoad(), getParentForChildren(), isClean()); serverLoad_ = null; } return serverLoadBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.LiveServerInfo) } + // @@protoc_insertion_point(class_scope:hbase.pb.LiveServerInfo) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfo DEFAULT_INSTANCE; static { - defaultInstance = new LiveServerInfo(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfo(); + } + + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfo getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public LiveServerInfo parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new LiveServerInfo(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfo getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.LiveServerInfo) } - public interface ClusterStatusOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ClusterStatusOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ClusterStatus) + com.google.protobuf.MessageOrBuilder { - // optional .hbase.pb.HBaseVersionFileContent hbase_version = 1; /** * optional .hbase.pb.HBaseVersionFileContent hbase_version = 1; */ @@ -11813,7 +12012,6 @@ public final class ClusterStatusProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder getHbaseVersionOrBuilder(); - // repeated .hbase.pb.LiveServerInfo live_servers = 2; /** * repeated .hbase.pb.LiveServerInfo live_servers = 2; */ @@ -11838,7 +12036,6 @@ public final class ClusterStatusProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder getLiveServersOrBuilder( int index); - // repeated .hbase.pb.ServerName dead_servers = 3; /** * repeated .hbase.pb.ServerName dead_servers = 3; */ @@ -11863,7 +12060,6 @@ public final class 
ClusterStatusProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDeadServersOrBuilder( int index); - // repeated .hbase.pb.RegionInTransition regions_in_transition = 4; /** * repeated .hbase.pb.RegionInTransition regions_in_transition = 4; */ @@ -11888,7 +12084,6 @@ public final class ClusterStatusProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder getRegionsInTransitionOrBuilder( int index); - // optional .hbase.pb.ClusterId cluster_id = 5; /** * optional .hbase.pb.ClusterId cluster_id = 5; */ @@ -11902,7 +12097,6 @@ public final class ClusterStatusProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder getClusterIdOrBuilder(); - // repeated .hbase.pb.Coprocessor master_coprocessors = 6; /** * repeated .hbase.pb.Coprocessor master_coprocessors = 6; */ @@ -11927,7 +12121,6 @@ public final class ClusterStatusProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getMasterCoprocessorsOrBuilder( int index); - // optional .hbase.pb.ServerName master = 7; /** * optional .hbase.pb.ServerName master = 7; */ @@ -11941,7 +12134,6 @@ public final class ClusterStatusProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder(); - // repeated .hbase.pb.ServerName backup_masters = 8; /** * repeated .hbase.pb.ServerName backup_masters = 8; */ @@ -11966,7 +12158,6 @@ public final class ClusterStatusProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getBackupMastersOrBuilder( int index); - // optional bool balancer_on = 9; /** * optional bool balancer_on = 9; */ @@ -11979,36 +12170,33 @@ public final class ClusterStatusProtos { /** * Protobuf type {@code hbase.pb.ClusterStatus} */ - public static final class ClusterStatus extends - com.google.protobuf.GeneratedMessage - implements ClusterStatusOrBuilder { + public 
static final class ClusterStatus extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ClusterStatus) + ClusterStatusOrBuilder { // Use ClusterStatus.newBuilder() to construct. - private ClusterStatus(com.google.protobuf.GeneratedMessage.Builder builder) { + private ClusterStatus(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ClusterStatus(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ClusterStatus defaultInstance; - public static ClusterStatus getDefaultInstance() { - return defaultInstance; } - - public ClusterStatus getDefaultInstanceForType() { - return defaultInstance; + private ClusterStatus() { + liveServers_ = java.util.Collections.emptyList(); + deadServers_ = java.util.Collections.emptyList(); + regionsInTransition_ = java.util.Collections.emptyList(); + masterCoprocessors_ = java.util.Collections.emptyList(); + backupMasters_ = java.util.Collections.emptyList(); + balancerOn_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ClusterStatus( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -12045,7 +12233,8 @@ public final class ClusterStatusProtos { liveServers_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } - liveServers_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfo.PARSER, 
extensionRegistry)); + liveServers_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfo.PARSER, extensionRegistry)); break; } case 26: { @@ -12053,7 +12242,8 @@ public final class ClusterStatusProtos { deadServers_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000004; } - deadServers_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry)); + deadServers_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry)); break; } case 34: { @@ -12061,7 +12251,8 @@ public final class ClusterStatusProtos { regionsInTransition_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000008; } - regionsInTransition_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition.PARSER, extensionRegistry)); + regionsInTransition_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition.PARSER, extensionRegistry)); break; } case 42: { @@ -12082,7 +12273,8 @@ public final class ClusterStatusProtos { masterCoprocessors_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000020; } - masterCoprocessors_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor.PARSER, extensionRegistry)); + masterCoprocessors_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor.PARSER, extensionRegistry)); break; } case 58: { @@ -12103,7 +12295,8 @@ public final class ClusterStatusProtos { backupMasters_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000080; } - backupMasters_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry)); + backupMasters_.add( + 
input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry)); break; } case 72: { @@ -12117,7 +12310,7 @@ public final class ClusterStatusProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { liveServers_ = java.util.Collections.unmodifiableList(liveServers_); @@ -12143,30 +12336,14 @@ public final class ClusterStatusProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_ClusterStatus_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_ClusterStatus_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ClusterStatus parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ClusterStatus(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional .hbase.pb.HBaseVersionFileContent hbase_version = 1; public static final int HBASE_VERSION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent 
hbaseVersion_; /** @@ -12179,16 +12356,15 @@ public final class ClusterStatusProtos { * optional .hbase.pb.HBaseVersionFileContent hbase_version = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent getHbaseVersion() { - return hbaseVersion_; + return hbaseVersion_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance() : hbaseVersion_; } /** * optional .hbase.pb.HBaseVersionFileContent hbase_version = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder getHbaseVersionOrBuilder() { - return hbaseVersion_; + return hbaseVersion_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance() : hbaseVersion_; } - // repeated .hbase.pb.LiveServerInfo live_servers = 2; public static final int LIVE_SERVERS_FIELD_NUMBER = 2; private java.util.List liveServers_; /** @@ -12224,7 +12400,6 @@ public final class ClusterStatusProtos { return liveServers_.get(index); } - // repeated .hbase.pb.ServerName dead_servers = 3; public static final int DEAD_SERVERS_FIELD_NUMBER = 3; private java.util.List deadServers_; /** @@ -12260,7 +12435,6 @@ public final class ClusterStatusProtos { return deadServers_.get(index); } - // repeated .hbase.pb.RegionInTransition regions_in_transition = 4; public static final int REGIONS_IN_TRANSITION_FIELD_NUMBER = 4; private java.util.List regionsInTransition_; /** @@ -12296,7 +12470,6 @@ public final class ClusterStatusProtos { return regionsInTransition_.get(index); } - // optional .hbase.pb.ClusterId cluster_id = 5; public static final int CLUSTER_ID_FIELD_NUMBER = 5; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId clusterId_; /** @@ -12309,16 +12482,15 @@ public final class ClusterStatusProtos { * optional .hbase.pb.ClusterId cluster_id = 5; */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId getClusterId() { - return clusterId_; + return clusterId_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance() : clusterId_; } /** * optional .hbase.pb.ClusterId cluster_id = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder getClusterIdOrBuilder() { - return clusterId_; + return clusterId_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance() : clusterId_; } - // repeated .hbase.pb.Coprocessor master_coprocessors = 6; public static final int MASTER_COPROCESSORS_FIELD_NUMBER = 6; private java.util.List masterCoprocessors_; /** @@ -12354,7 +12526,6 @@ public final class ClusterStatusProtos { return masterCoprocessors_.get(index); } - // optional .hbase.pb.ServerName master = 7; public static final int MASTER_FIELD_NUMBER = 7; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName master_; /** @@ -12367,16 +12538,15 @@ public final class ClusterStatusProtos { * optional .hbase.pb.ServerName master = 7; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getMaster() { - return master_; + return master_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : master_; } /** * optional .hbase.pb.ServerName master = 7; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder() { - return master_; + return master_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : master_; } - // repeated .hbase.pb.ServerName backup_masters = 8; public static final int BACKUP_MASTERS_FIELD_NUMBER = 8; private java.util.List backupMasters_; /** @@ -12412,7 +12582,6 @@ public final class ClusterStatusProtos { return backupMasters_.get(index); } - // optional bool balancer_on = 9; public static final int BALANCER_ON_FIELD_NUMBER = 9; private boolean balancerOn_; /** @@ -12428,21 +12597,11 @@ public final class ClusterStatusProtos { return balancerOn_; } - private void initFields() { - hbaseVersion_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance(); - liveServers_ = java.util.Collections.emptyList(); - deadServers_ = java.util.Collections.emptyList(); - regionsInTransition_ = java.util.Collections.emptyList(); - clusterId_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance(); - masterCoprocessors_ = java.util.Collections.emptyList(); - master_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - backupMasters_ = java.util.Collections.emptyList(); - balancerOn_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (hasHbaseVersion()) { if (!getHbaseVersion().isInitialized()) { @@ -12498,9 +12657,8 @@ public final class ClusterStatusProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, hbaseVersion_); + output.writeMessage(1, getHbaseVersion()); } for (int i = 0; i < liveServers_.size(); i++) { output.writeMessage(2, liveServers_.get(i)); @@ 
-12512,13 +12670,13 @@ public final class ClusterStatusProtos { output.writeMessage(4, regionsInTransition_.get(i)); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(5, clusterId_); + output.writeMessage(5, getClusterId()); } for (int i = 0; i < masterCoprocessors_.size(); i++) { output.writeMessage(6, masterCoprocessors_.get(i)); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeMessage(7, master_); + output.writeMessage(7, getMaster()); } for (int i = 0; i < backupMasters_.size(); i++) { output.writeMessage(8, backupMasters_.get(i)); @@ -12526,18 +12684,17 @@ public final class ClusterStatusProtos { if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeBool(9, balancerOn_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, hbaseVersion_); + .computeMessageSize(1, getHbaseVersion()); } for (int i = 0; i < liveServers_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -12553,7 +12710,7 @@ public final class ClusterStatusProtos { } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(5, clusterId_); + .computeMessageSize(5, getClusterId()); } for (int i = 0; i < masterCoprocessors_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -12561,7 +12718,7 @@ public final class ClusterStatusProtos { } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(7, master_); + .computeMessageSize(7, getMaster()); } for (int i = 0; i < backupMasters_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -12571,19 +12728,13 @@ public final class 
ClusterStatusProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(9, balancerOn_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -12624,12 +12775,10 @@ public final class ClusterStatusProtos { result = result && (getBalancerOn() == other.getBalancerOn()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -12671,9 +12820,10 @@ public final class ClusterStatusProtos { } if (hasBalancerOn()) { hash = (37 * hash) + BALANCER_ON_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getBalancerOn()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getBalancerOn()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -12701,46 +12851,57 @@ public final class ClusterStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return 
PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus prototype) { - return newBuilder().mergeFrom(prototype); + return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -12748,14 +12909,15 @@ public final class ClusterStatusProtos { * Protobuf type {@code hbase.pb.ClusterStatus} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ClusterStatus) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_ClusterStatus_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_ClusterStatus_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -12768,12 +12930,13 @@ public final class ClusterStatusProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + 
.alwaysUseFieldBuilders) { getHbaseVersionFieldBuilder(); getLiveServersFieldBuilder(); getDeadServersFieldBuilder(); @@ -12784,14 +12947,10 @@ public final class ClusterStatusProtos { getBackupMastersFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (hbaseVersionBuilder_ == null) { - hbaseVersion_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance(); + hbaseVersion_ = null; } else { hbaseVersionBuilder_.clear(); } @@ -12815,7 +12974,7 @@ public final class ClusterStatusProtos { regionsInTransitionBuilder_.clear(); } if (clusterIdBuilder_ == null) { - clusterId_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance(); + clusterId_ = null; } else { clusterIdBuilder_.clear(); } @@ -12827,7 +12986,7 @@ public final class ClusterStatusProtos { masterCoprocessorsBuilder_.clear(); } if (masterBuilder_ == null) { - master_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + master_ = null; } else { masterBuilder_.clear(); } @@ -12843,10 +13002,6 @@ public final class ClusterStatusProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_ClusterStatus_descriptor; @@ -12946,6 +13101,32 @@ public final class ClusterStatusProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + 
com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus)other); @@ -12979,7 +13160,7 @@ public final class ClusterStatusProtos { liveServers_ = other.liveServers_; bitField0_ = (bitField0_ & ~0x00000002); liveServersBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getLiveServersFieldBuilder() : null; } else { liveServersBuilder_.addAllMessages(other.liveServers_); @@ -13005,7 +13186,7 @@ public final class ClusterStatusProtos { deadServers_ = other.deadServers_; bitField0_ = (bitField0_ & ~0x00000004); deadServersBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getDeadServersFieldBuilder() : null; } else { deadServersBuilder_.addAllMessages(other.deadServers_); @@ -13031,7 +13212,7 @@ public final class ClusterStatusProtos { regionsInTransition_ = other.regionsInTransition_; bitField0_ = (bitField0_ & ~0x00000008); regionsInTransitionBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getRegionsInTransitionFieldBuilder() : null; } else { regionsInTransitionBuilder_.addAllMessages(other.regionsInTransition_); @@ -13060,7 +13241,7 @@ public final class ClusterStatusProtos { masterCoprocessors_ = other.masterCoprocessors_; bitField0_ = (bitField0_ & ~0x00000020); masterCoprocessorsBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getMasterCoprocessorsFieldBuilder() : null; } else { masterCoprocessorsBuilder_.addAllMessages(other.masterCoprocessors_); @@ -13089,7 +13270,7 @@ public final class ClusterStatusProtos { backupMasters_ = other.backupMasters_; bitField0_ = (bitField0_ & ~0x00000080); backupMastersBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getBackupMastersFieldBuilder() : null; } else { backupMastersBuilder_.addAllMessages(other.backupMasters_); @@ -13099,56 +13280,49 @@ public final class ClusterStatusProtos { if (other.hasBalancerOn()) { setBalancerOn(other.getBalancerOn()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (hasHbaseVersion()) { if (!getHbaseVersion().isInitialized()) { - return false; } } for (int i = 0; i < getLiveServersCount(); i++) { if (!getLiveServers(i).isInitialized()) { - return false; } } for (int i = 0; i < getDeadServersCount(); i++) { if (!getDeadServers(i).isInitialized()) { - return false; } } for (int i = 0; i < getRegionsInTransitionCount(); i++) { if (!getRegionsInTransition(i).isInitialized()) { - return false; } } if (hasClusterId()) { if (!getClusterId().isInitialized()) { - return false; } } for (int i = 0; i < getMasterCoprocessorsCount(); i++) { if (!getMasterCoprocessors(i).isInitialized()) { - return false; } } if (hasMaster()) { if (!getMaster().isInitialized()) { - return false; } } 
for (int i = 0; i < getBackupMastersCount(); i++) { if (!getBackupMasters(i).isInitialized()) { - return false; } } @@ -13164,7 +13338,7 @@ public final class ClusterStatusProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -13174,9 +13348,8 @@ public final class ClusterStatusProtos { } private int bitField0_; - // optional .hbase.pb.HBaseVersionFileContent hbase_version = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent hbaseVersion_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent hbaseVersion_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent, org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder> hbaseVersionBuilder_; /** * optional .hbase.pb.HBaseVersionFileContent hbase_version = 1; @@ -13189,7 +13362,7 @@ public final class ClusterStatusProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent getHbaseVersion() { if (hbaseVersionBuilder_ == null) { - return hbaseVersion_; + return hbaseVersion_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance() : hbaseVersion_; } else { return hbaseVersionBuilder_.getMessage(); } @@ -13230,6 +13403,7 @@ public final class ClusterStatusProtos { public Builder mergeHbaseVersion(org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent value) { if (hbaseVersionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + hbaseVersion_ != null && hbaseVersion_ != org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance()) { hbaseVersion_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent.newBuilder(hbaseVersion_).mergeFrom(value).buildPartial(); @@ -13248,7 +13422,7 @@ public final class ClusterStatusProtos { */ public Builder clearHbaseVersion() { if (hbaseVersionBuilder_ == null) { - hbaseVersion_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance(); + hbaseVersion_ = null; onChanged(); } else { hbaseVersionBuilder_.clear(); @@ -13271,19 +13445,20 @@ public final class ClusterStatusProtos { if (hbaseVersionBuilder_ != null) { return hbaseVersionBuilder_.getMessageOrBuilder(); } else { - return hbaseVersion_; + return hbaseVersion_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance() : hbaseVersion_; } } /** * optional .hbase.pb.HBaseVersionFileContent hbase_version = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent, org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder> getHbaseVersionFieldBuilder() { if (hbaseVersionBuilder_ == null) { - hbaseVersionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + hbaseVersionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent, org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder>( - hbaseVersion_, + getHbaseVersion(), getParentForChildren(), isClean()); hbaseVersion_ = null; @@ -13291,7 +13466,6 @@ public final class ClusterStatusProtos { return hbaseVersionBuilder_; } - // repeated .hbase.pb.LiveServerInfo live_servers = 2; private java.util.List liveServers_ = java.util.Collections.emptyList(); private void ensureLiveServersIsMutable() { @@ -13301,7 +13475,7 @@ public final class ClusterStatusProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder> liveServersBuilder_; /** @@ -13433,7 +13607,8 @@ public final class ClusterStatusProtos { java.lang.Iterable values) { if 
(liveServersBuilder_ == null) { ensureLiveServersIsMutable(); - super.addAll(values, liveServers_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, liveServers_); onChanged(); } else { liveServersBuilder_.addAllMessages(values); @@ -13516,11 +13691,11 @@ public final class ClusterStatusProtos { getLiveServersBuilderList() { return getLiveServersFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder> getLiveServersFieldBuilder() { if (liveServersBuilder_ == null) { - liveServersBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + liveServersBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder>( liveServers_, ((bitField0_ & 0x00000002) == 0x00000002), @@ -13531,7 +13706,6 @@ public final class ClusterStatusProtos { return liveServersBuilder_; } - // repeated .hbase.pb.ServerName dead_servers = 3; private java.util.List deadServers_ = java.util.Collections.emptyList(); private void ensureDeadServersIsMutable() { @@ -13541,7 +13715,7 @@ public final class ClusterStatusProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> deadServersBuilder_; /** @@ -13673,7 +13847,8 @@ public final class ClusterStatusProtos { java.lang.Iterable values) { if (deadServersBuilder_ == null) { ensureDeadServersIsMutable(); - super.addAll(values, deadServers_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, deadServers_); onChanged(); } else { deadServersBuilder_.addAllMessages(values); @@ -13756,11 +13931,11 @@ public final class ClusterStatusProtos { getDeadServersBuilderList() { return getDeadServersFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getDeadServersFieldBuilder() { if (deadServersBuilder_ == null) { - deadServersBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + deadServersBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( deadServers_, ((bitField0_ & 0x00000004) == 0x00000004), @@ -13771,7 +13946,6 @@ public final class ClusterStatusProtos { return deadServersBuilder_; } - // repeated .hbase.pb.RegionInTransition regions_in_transition = 4; private java.util.List regionsInTransition_ = java.util.Collections.emptyList(); private void ensureRegionsInTransitionIsMutable() { @@ -13781,7 +13955,7 @@ public final class ClusterStatusProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder> regionsInTransitionBuilder_; /** @@ -13913,7 +14087,8 @@ public final class ClusterStatusProtos { java.lang.Iterable values) { if (regionsInTransitionBuilder_ == null) { ensureRegionsInTransitionIsMutable(); - super.addAll(values, regionsInTransition_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, regionsInTransition_); onChanged(); } else { regionsInTransitionBuilder_.addAllMessages(values); @@ -13996,11 +14171,11 @@ public final class ClusterStatusProtos { getRegionsInTransitionBuilderList() { return getRegionsInTransitionFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder> getRegionsInTransitionFieldBuilder() { if (regionsInTransitionBuilder_ == null) { - regionsInTransitionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + regionsInTransitionBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder>( regionsInTransition_, ((bitField0_ & 0x00000008) == 0x00000008), @@ -14011,9 +14186,8 @@ public final class ClusterStatusProtos { return regionsInTransitionBuilder_; } - // optional .hbase.pb.ClusterId 
cluster_id = 5; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId clusterId_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId clusterId_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder> clusterIdBuilder_; /** * optional .hbase.pb.ClusterId cluster_id = 5; @@ -14026,7 +14200,7 @@ public final class ClusterStatusProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId getClusterId() { if (clusterIdBuilder_ == null) { - return clusterId_; + return clusterId_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance() : clusterId_; } else { return clusterIdBuilder_.getMessage(); } @@ -14067,6 +14241,7 @@ public final class ClusterStatusProtos { public Builder mergeClusterId(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId value) { if (clusterIdBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010) && + clusterId_ != null && clusterId_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance()) { clusterId_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId.newBuilder(clusterId_).mergeFrom(value).buildPartial(); @@ -14085,7 +14260,7 @@ public final class ClusterStatusProtos { */ public Builder clearClusterId() { if (clusterIdBuilder_ == null) { - clusterId_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance(); + clusterId_ = null; onChanged(); } else { 
clusterIdBuilder_.clear(); @@ -14108,19 +14283,20 @@ public final class ClusterStatusProtos { if (clusterIdBuilder_ != null) { return clusterIdBuilder_.getMessageOrBuilder(); } else { - return clusterId_; + return clusterId_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance() : clusterId_; } } /** * optional .hbase.pb.ClusterId cluster_id = 5; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder> getClusterIdFieldBuilder() { if (clusterIdBuilder_ == null) { - clusterIdBuilder_ = new com.google.protobuf.SingleFieldBuilder< + clusterIdBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder>( - clusterId_, + getClusterId(), getParentForChildren(), isClean()); clusterId_ = null; @@ -14128,7 +14304,6 @@ public final class ClusterStatusProtos { return clusterIdBuilder_; } - // repeated .hbase.pb.Coprocessor master_coprocessors = 6; private java.util.List masterCoprocessors_ = java.util.Collections.emptyList(); private void ensureMasterCoprocessorsIsMutable() { @@ -14138,7 +14313,7 @@ public final class ClusterStatusProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CoprocessorOrBuilder> 
masterCoprocessorsBuilder_; /** @@ -14270,7 +14445,8 @@ public final class ClusterStatusProtos { java.lang.Iterable values) { if (masterCoprocessorsBuilder_ == null) { ensureMasterCoprocessorsIsMutable(); - super.addAll(values, masterCoprocessors_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, masterCoprocessors_); onChanged(); } else { masterCoprocessorsBuilder_.addAllMessages(values); @@ -14353,11 +14529,11 @@ public final class ClusterStatusProtos { getMasterCoprocessorsBuilderList() { return getMasterCoprocessorsFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CoprocessorOrBuilder> getMasterCoprocessorsFieldBuilder() { if (masterCoprocessorsBuilder_ == null) { - masterCoprocessorsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + masterCoprocessorsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CoprocessorOrBuilder>( masterCoprocessors_, ((bitField0_ & 0x00000020) == 0x00000020), @@ -14368,9 +14544,8 @@ public final class ClusterStatusProtos { return masterCoprocessorsBuilder_; } - // optional .hbase.pb.ServerName master = 7; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName master_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName master_ = null; + private 
com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> masterBuilder_; /** * optional .hbase.pb.ServerName master = 7; @@ -14383,7 +14558,7 @@ public final class ClusterStatusProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getMaster() { if (masterBuilder_ == null) { - return master_; + return master_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : master_; } else { return masterBuilder_.getMessage(); } @@ -14424,6 +14599,7 @@ public final class ClusterStatusProtos { public Builder mergeMaster(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (masterBuilder_ == null) { if (((bitField0_ & 0x00000040) == 0x00000040) && + master_ != null && master_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { master_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.newBuilder(master_).mergeFrom(value).buildPartial(); @@ -14442,7 +14618,7 @@ public final class ClusterStatusProtos { */ public Builder clearMaster() { if (masterBuilder_ == null) { - master_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + master_ = null; onChanged(); } else { masterBuilder_.clear(); @@ -14465,19 +14641,20 @@ public final class ClusterStatusProtos { if (masterBuilder_ != null) { return masterBuilder_.getMessageOrBuilder(); } else { - return master_; + return master_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : master_; } } /** * optional .hbase.pb.ServerName master = 7; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getMasterFieldBuilder() { if (masterBuilder_ == null) { - masterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + masterBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( - master_, + getMaster(), getParentForChildren(), isClean()); master_ = null; @@ -14485,7 +14662,6 @@ public final class ClusterStatusProtos { return masterBuilder_; } - // repeated .hbase.pb.ServerName backup_masters = 8; private java.util.List backupMasters_ = java.util.Collections.emptyList(); private void ensureBackupMastersIsMutable() { @@ -14495,7 +14671,7 @@ public final class ClusterStatusProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> backupMastersBuilder_; /** @@ -14627,7 +14803,8 @@ public final class ClusterStatusProtos { java.lang.Iterable values) { if (backupMastersBuilder_ == null) { ensureBackupMastersIsMutable(); - super.addAll(values, backupMasters_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, backupMasters_); 
onChanged(); } else { backupMastersBuilder_.addAllMessages(values); @@ -14710,11 +14887,11 @@ public final class ClusterStatusProtos { getBackupMastersBuilderList() { return getBackupMastersFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getBackupMastersFieldBuilder() { if (backupMastersBuilder_ == null) { - backupMastersBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + backupMastersBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( backupMasters_, ((bitField0_ & 0x00000080) == 0x00000080), @@ -14725,7 +14902,6 @@ public final class ClusterStatusProtos { return backupMastersBuilder_; } - // optional bool balancer_on = 9; private boolean balancerOn_ ; /** * optional bool balancer_on = 9; @@ -14757,74 +14933,111 @@ public final class ClusterStatusProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ClusterStatus) } + // @@protoc_insertion_point(class_scope:hbase.pb.ClusterStatus) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus DEFAULT_INSTANCE; static { - defaultInstance = new 
ClusterStatus(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ClusterStatus parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ClusterStatus(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ClusterStatus) } - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RegionState_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RegionState_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RegionInTransition_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RegionInTransition_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static 
final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_StoreSequenceId_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_StoreSequenceId_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RegionStoreSequenceIds_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RegionStoreSequenceIds_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RegionLoad_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RegionLoad_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ReplicationLoadSink_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ReplicationLoadSink_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ReplicationLoadSource_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ReplicationLoadSource_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + 
private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ServerLoad_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ServerLoad_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_LiveServerInfo_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_LiveServerInfo_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ClusterStatus_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ClusterStatus_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { @@ -14895,73 +15108,13 @@ public final class ClusterStatusProtos { "erStatusProtosH\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_hbase_pb_RegionState_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_hbase_pb_RegionState_fieldAccessorTable = new - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RegionState_descriptor, - new java.lang.String[] { "RegionInfo", "State", "Stamp", }); - internal_static_hbase_pb_RegionInTransition_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_hbase_pb_RegionInTransition_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RegionInTransition_descriptor, - new java.lang.String[] { "Spec", "RegionState", }); - internal_static_hbase_pb_StoreSequenceId_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_hbase_pb_StoreSequenceId_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_StoreSequenceId_descriptor, - new java.lang.String[] { "FamilyName", "SequenceId", }); - internal_static_hbase_pb_RegionStoreSequenceIds_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_hbase_pb_RegionStoreSequenceIds_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RegionStoreSequenceIds_descriptor, - new java.lang.String[] { "LastFlushedSequenceId", "StoreSequenceId", }); - internal_static_hbase_pb_RegionLoad_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_hbase_pb_RegionLoad_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RegionLoad_descriptor, - new java.lang.String[] { "RegionSpecifier", "Stores", "Storefiles", "StoreUncompressedSizeMB", "StorefileSizeMB", "MemstoreSizeMB", "StorefileIndexSizeMB", "ReadRequestsCount", "WriteRequestsCount", "TotalCompactingKVs", "CurrentCompactedKVs", "RootIndexSizeKB", "TotalStaticIndexSizeKB", "TotalStaticBloomSizeKB", "CompleteSequenceId", "DataLocality", "LastMajorCompactionTs", "StoreCompleteSequenceId", "FilteredReadRequestsCount", }); - internal_static_hbase_pb_ReplicationLoadSink_descriptor 
= - getDescriptor().getMessageTypes().get(5); - internal_static_hbase_pb_ReplicationLoadSink_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ReplicationLoadSink_descriptor, - new java.lang.String[] { "AgeOfLastAppliedOp", "TimeStampsOfLastAppliedOp", }); - internal_static_hbase_pb_ReplicationLoadSource_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_hbase_pb_ReplicationLoadSource_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ReplicationLoadSource_descriptor, - new java.lang.String[] { "PeerID", "AgeOfLastShippedOp", "SizeOfLogQueue", "TimeStampOfLastShippedOp", "ReplicationLag", }); - internal_static_hbase_pb_ServerLoad_descriptor = - getDescriptor().getMessageTypes().get(7); - internal_static_hbase_pb_ServerLoad_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ServerLoad_descriptor, - new java.lang.String[] { "NumberOfRequests", "TotalNumberOfRequests", "UsedHeapMB", "MaxHeapMB", "RegionLoads", "Coprocessors", "ReportStartTime", "ReportEndTime", "InfoServerPort", "ReplLoadSource", "ReplLoadSink", }); - internal_static_hbase_pb_LiveServerInfo_descriptor = - getDescriptor().getMessageTypes().get(8); - internal_static_hbase_pb_LiveServerInfo_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_LiveServerInfo_descriptor, - new java.lang.String[] { "Server", "ServerLoad", }); - internal_static_hbase_pb_ClusterStatus_descriptor = - getDescriptor().getMessageTypes().get(9); - internal_static_hbase_pb_ClusterStatus_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ClusterStatus_descriptor, - new java.lang.String[] { "HbaseVersion", "LiveServers", "DeadServers", "RegionsInTransition", "ClusterId", "MasterCoprocessors", "Master", 
"BackupMasters", "BalancerOn", }); - return null; - } - }; + new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { @@ -14969,6 +15122,69 @@ public final class ClusterStatusProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.getDescriptor(), org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.getDescriptor(), }, assigner); + internal_static_hbase_pb_RegionState_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_hbase_pb_RegionState_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_RegionState_descriptor, + new java.lang.String[] { "RegionInfo", "State", "Stamp", }); + internal_static_hbase_pb_RegionInTransition_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_hbase_pb_RegionInTransition_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_RegionInTransition_descriptor, + new java.lang.String[] { "Spec", "RegionState", }); + internal_static_hbase_pb_StoreSequenceId_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_hbase_pb_StoreSequenceId_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_StoreSequenceId_descriptor, + new java.lang.String[] { "FamilyName", "SequenceId", }); + internal_static_hbase_pb_RegionStoreSequenceIds_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_hbase_pb_RegionStoreSequenceIds_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + 
internal_static_hbase_pb_RegionStoreSequenceIds_descriptor, + new java.lang.String[] { "LastFlushedSequenceId", "StoreSequenceId", }); + internal_static_hbase_pb_RegionLoad_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_hbase_pb_RegionLoad_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_RegionLoad_descriptor, + new java.lang.String[] { "RegionSpecifier", "Stores", "Storefiles", "StoreUncompressedSizeMB", "StorefileSizeMB", "MemstoreSizeMB", "StorefileIndexSizeMB", "ReadRequestsCount", "WriteRequestsCount", "TotalCompactingKVs", "CurrentCompactedKVs", "RootIndexSizeKB", "TotalStaticIndexSizeKB", "TotalStaticBloomSizeKB", "CompleteSequenceId", "DataLocality", "LastMajorCompactionTs", "StoreCompleteSequenceId", "FilteredReadRequestsCount", }); + internal_static_hbase_pb_ReplicationLoadSink_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_hbase_pb_ReplicationLoadSink_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ReplicationLoadSink_descriptor, + new java.lang.String[] { "AgeOfLastAppliedOp", "TimeStampsOfLastAppliedOp", }); + internal_static_hbase_pb_ReplicationLoadSource_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_hbase_pb_ReplicationLoadSource_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ReplicationLoadSource_descriptor, + new java.lang.String[] { "PeerID", "AgeOfLastShippedOp", "SizeOfLogQueue", "TimeStampOfLastShippedOp", "ReplicationLag", }); + internal_static_hbase_pb_ServerLoad_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_hbase_pb_ServerLoad_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ServerLoad_descriptor, + new java.lang.String[] { "NumberOfRequests", "TotalNumberOfRequests", 
"UsedHeapMB", "MaxHeapMB", "RegionLoads", "Coprocessors", "ReportStartTime", "ReportEndTime", "InfoServerPort", "ReplLoadSource", "ReplLoadSink", }); + internal_static_hbase_pb_LiveServerInfo_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_hbase_pb_LiveServerInfo_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_LiveServerInfo_descriptor, + new java.lang.String[] { "Server", "ServerLoad", }); + internal_static_hbase_pb_ClusterStatus_descriptor = + getDescriptor().getMessageTypes().get(9); + internal_static_hbase_pb_ClusterStatus_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ClusterStatus_descriptor, + new java.lang.String[] { "HbaseVersion", "LiveServers", "DeadServers", "RegionsInTransition", "ClusterId", "MasterCoprocessors", "Master", "BackupMasters", "BalancerOn", }); + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(); + org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.getDescriptor(); + org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ComparatorProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ComparatorProtos.java index aa8e31c..e1c605d 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ComparatorProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ComparatorProtos.java @@ -6,12 +6,18 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated; public final class ComparatorProtos { private ComparatorProtos() {} public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void 
registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); } - public interface ComparatorOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ComparatorOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.Comparator) + com.google.protobuf.MessageOrBuilder { - // required string name = 1; /** * required string name = 1; */ @@ -26,7 +32,6 @@ public final class ComparatorProtos { com.google.protobuf.ByteString getNameBytes(); - // optional bytes serialized_comparator = 2; /** * optional bytes serialized_comparator = 2; */ @@ -39,36 +44,29 @@ public final class ComparatorProtos { /** * Protobuf type {@code hbase.pb.Comparator} */ - public static final class Comparator extends - com.google.protobuf.GeneratedMessage - implements ComparatorOrBuilder { + public static final class Comparator extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.Comparator) + ComparatorOrBuilder { // Use Comparator.newBuilder() to construct. 
- private Comparator(com.google.protobuf.GeneratedMessage.Builder builder) { + private Comparator(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private Comparator(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final Comparator defaultInstance; - public static Comparator getDefaultInstance() { - return defaultInstance; - } - - public Comparator getDefaultInstanceForType() { - return defaultInstance; + private Comparator() { + name_ = ""; + serializedComparator_ = com.google.protobuf.ByteString.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private Comparator( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -88,8 +86,9 @@ public final class ComparatorProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - name_ = input.readBytes(); + name_ = bs; break; } case 18: { @@ -103,7 +102,7 @@ public final class ComparatorProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -114,32 +113,16 @@ public final class ComparatorProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_Comparator_descriptor; } - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_Comparator_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public Comparator parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new Comparator(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string name = 1; public static final int NAME_FIELD_NUMBER = 1; - private java.lang.Object name_; + private volatile java.lang.Object name_; /** * required string name = 1; */ @@ -180,7 +163,6 @@ public final class ComparatorProtos { } } - // optional bytes serialized_comparator = 2; public static final int SERIALIZED_COMPARATOR_FIELD_NUMBER = 2; private com.google.protobuf.ByteString serializedComparator_; /** @@ -196,14 +178,11 @@ public final class ComparatorProtos { return serializedComparator_; } - private void initFields() { - name_ = ""; - serializedComparator_ = com.google.protobuf.ByteString.EMPTY; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasName()) { memoizedIsInitialized = 0; @@ -215,43 +194,34 @@ public final class ComparatorProtos { public 
void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, serializedComparator_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, serializedComparator_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -272,12 +242,10 @@ public final class ComparatorProtos { result = result && getSerializedComparator() .equals(other.getSerializedComparator()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -293,7 +261,7 @@ public final class ComparatorProtos { hash = (37 * hash) + SERIALIZED_COMPARATOR_FIELD_NUMBER; hash = (53 * hash) + 
getSerializedComparator().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -321,46 +289,57 @@ public final class ComparatorProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -368,14 +347,15 @@ public final class ComparatorProtos { * Protobuf type {@code hbase.pb.Comparator} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ComparatorOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.Comparator) + org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ComparatorOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_Comparator_descriptor; } - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_Comparator_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -388,18 +368,15 @@ public final class ComparatorProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); name_ = ""; @@ -409,10 +386,6 @@ public final class ComparatorProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_Comparator_descriptor; @@ -447,6 +420,32 @@ public final class ComparatorProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + 
} + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator)other); @@ -466,13 +465,13 @@ public final class ComparatorProtos { if (other.hasSerializedComparator()) { setSerializedComparator(other.getSerializedComparator()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasName()) { - return false; } return true; @@ -487,7 +486,7 @@ public final class ComparatorProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -497,7 +496,6 @@ public final class ComparatorProtos { } private int bitField0_; - // required string name = 1; private java.lang.Object name_ = ""; /** * required string name = 1; @@ -511,9 +509,12 @@ public final class ComparatorProtos { public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - name_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + name_ = s; + } return s; } else { return (java.lang.String) ref; @@ -571,7 +572,6 @@ public final class ComparatorProtos { return this; } - // optional bytes 
serialized_comparator = 2; private com.google.protobuf.ByteString serializedComparator_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes serialized_comparator = 2; @@ -606,22 +606,59 @@ public final class ComparatorProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.Comparator) } + // @@protoc_insertion_point(class_scope:hbase.pb.Comparator) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator DEFAULT_INSTANCE; static { - defaultInstance = new Comparator(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public Comparator parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Comparator(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.Comparator) } - public interface 
ByteArrayComparableOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ByteArrayComparableOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ByteArrayComparable) + com.google.protobuf.MessageOrBuilder { - // optional bytes value = 1; /** * optional bytes value = 1; */ @@ -634,36 +671,28 @@ public final class ComparatorProtos { /** * Protobuf type {@code hbase.pb.ByteArrayComparable} */ - public static final class ByteArrayComparable extends - com.google.protobuf.GeneratedMessage - implements ByteArrayComparableOrBuilder { + public static final class ByteArrayComparable extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ByteArrayComparable) + ByteArrayComparableOrBuilder { // Use ByteArrayComparable.newBuilder() to construct. - private ByteArrayComparable(com.google.protobuf.GeneratedMessage.Builder builder) { + private ByteArrayComparable(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private ByteArrayComparable(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ByteArrayComparable defaultInstance; - public static ByteArrayComparable getDefaultInstance() { - return defaultInstance; - } - - public ByteArrayComparable getDefaultInstanceForType() { - return defaultInstance; + private ByteArrayComparable() { + value_ = com.google.protobuf.ByteString.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ByteArrayComparable( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int 
mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -693,7 +722,7 @@ public final class ComparatorProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -704,30 +733,14 @@ public final class ComparatorProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_ByteArrayComparable_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_ByteArrayComparable_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ByteArrayComparable parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ByteArrayComparable(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional bytes value = 1; public static final int VALUE_FIELD_NUMBER = 1; private com.google.protobuf.ByteString value_; /** @@ -743,13 +756,11 @@ public final class ComparatorProtos { return value_; } - private void initFields() { - value_ = 
com.google.protobuf.ByteString.EMPTY; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -757,16 +768,14 @@ public final class ComparatorProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, value_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -774,19 +783,13 @@ public final class ComparatorProtos { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, value_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -802,12 +805,10 @@ public final class ComparatorProtos { result = result && getValue() .equals(other.getValue()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -819,7 +820,7 @@ public final class ComparatorProtos { hash = (37 * hash) + VALUE_FIELD_NUMBER; hash = (53 * hash) + getValue().hashCode(); } - hash = (29 * 
hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -847,46 +848,57 @@ public final class ComparatorProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -894,14 +906,15 @@ public final class ComparatorProtos { * Protobuf type {@code hbase.pb.ByteArrayComparable} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ByteArrayComparable) + org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_ByteArrayComparable_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_ByteArrayComparable_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -914,18 +927,15 @@ public final class ComparatorProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); value_ = com.google.protobuf.ByteString.EMPTY; @@ -933,10 +943,6 @@ public final class ComparatorProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_ByteArrayComparable_descriptor; @@ -967,6 +973,32 @@ public final class ComparatorProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); 
+ } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable)other); @@ -981,7 +1013,8 @@ public final class ComparatorProtos { if (other.hasValue()) { setValue(other.getValue()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -998,7 +1031,7 @@ public final class ComparatorProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -1008,7 +1041,6 @@ public final class ComparatorProtos { } private int bitField0_; - // optional bytes value = 1; private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes value = 1; @@ -1043,22 +1075,59 @@ public final class ComparatorProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // 
@@protoc_insertion_point(builder_scope:hbase.pb.ByteArrayComparable) } + // @@protoc_insertion_point(class_scope:hbase.pb.ByteArrayComparable) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable DEFAULT_INSTANCE; static { - defaultInstance = new ByteArrayComparable(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ByteArrayComparable parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ByteArrayComparable(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ByteArrayComparable) } - public interface BinaryComparatorOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface BinaryComparatorOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.BinaryComparator) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.ByteArrayComparable comparable = 1; /** * required .hbase.pb.ByteArrayComparable comparable = 1; */ @@ -1075,36 +1144,27 @@ public final class ComparatorProtos { /** * Protobuf type {@code hbase.pb.BinaryComparator} */ - public static final class 
BinaryComparator extends - com.google.protobuf.GeneratedMessage - implements BinaryComparatorOrBuilder { + public static final class BinaryComparator extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.BinaryComparator) + BinaryComparatorOrBuilder { // Use BinaryComparator.newBuilder() to construct. - private BinaryComparator(com.google.protobuf.GeneratedMessage.Builder builder) { + private BinaryComparator(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private BinaryComparator(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final BinaryComparator defaultInstance; - public static BinaryComparator getDefaultInstance() { - return defaultInstance; + private BinaryComparator() { } - public BinaryComparator getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private BinaryComparator( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -1142,7 +1202,7 @@ public final class ComparatorProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -1153,30 +1213,14 @@ public final class ComparatorProtos { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_BinaryComparator_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_BinaryComparator_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryComparator.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryComparator.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public BinaryComparator parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new BinaryComparator(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.ByteArrayComparable comparable = 1; public static final int COMPARABLE_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_; /** @@ -1189,22 +1233,20 @@ public final class ComparatorProtos { * required .hbase.pb.ByteArrayComparable comparable = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable() { - return comparable_; + return comparable_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance() : comparable_; } /** * required .hbase.pb.ByteArrayComparable comparable = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder() { - return comparable_; + return comparable_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance() : comparable_; } - private void initFields() { - comparable_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasComparable()) { memoizedIsInitialized = 0; @@ -1216,36 +1258,28 @@ public final class ComparatorProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, comparable_); + output.writeMessage(1, getComparable()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, comparable_); + .computeMessageSize(1, getComparable()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws 
java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -1261,12 +1295,10 @@ public final class ComparatorProtos { result = result && getComparable() .equals(other.getComparable()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -1278,7 +1310,7 @@ public final class ComparatorProtos { hash = (37 * hash) + COMPARABLE_FIELD_NUMBER; hash = (53 * hash) + getComparable().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -1306,46 +1338,57 @@ public final class ComparatorProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryComparator parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryComparator parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryComparator prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -1353,14 +1396,15 @@ public final class ComparatorProtos { * Protobuf type {@code hbase.pb.BinaryComparator} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryComparatorOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.BinaryComparator) + org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryComparatorOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_BinaryComparator_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_BinaryComparator_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -1373,23 +1417,20 @@ public final class ComparatorProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getComparableFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - 
public Builder clear() { super.clear(); if (comparableBuilder_ == null) { - comparable_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); + comparable_ = null; } else { comparableBuilder_.clear(); } @@ -1397,10 +1438,6 @@ public final class ComparatorProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_BinaryComparator_descriptor; @@ -1435,6 +1472,32 @@ public final class ComparatorProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryComparator) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryComparator)other); @@ -1449,13 +1512,13 @@ public final class ComparatorProtos { if (other.hasComparable()) { mergeComparable(other.getComparable()); } - 
this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasComparable()) { - return false; } return true; @@ -1470,7 +1533,7 @@ public final class ComparatorProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryComparator) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -1480,9 +1543,8 @@ public final class ComparatorProtos { } private int bitField0_; - // required .hbase.pb.ByteArrayComparable comparable = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder> comparableBuilder_; /** * required .hbase.pb.ByteArrayComparable comparable = 1; @@ -1495,7 +1557,7 @@ public final class ComparatorProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable() { if (comparableBuilder_ == null) { - return comparable_; + return comparable_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance() : comparable_; } else { return comparableBuilder_.getMessage(); } @@ -1536,6 +1598,7 @@ public final class ComparatorProtos { public Builder mergeComparable(org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable value) { if (comparableBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + comparable_ != null && comparable_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance()) { comparable_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.newBuilder(comparable_).mergeFrom(value).buildPartial(); @@ -1554,7 +1617,7 @@ public final class ComparatorProtos { */ public Builder clearComparable() { if (comparableBuilder_ == null) { - comparable_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); + comparable_ = null; onChanged(); } else { comparableBuilder_.clear(); @@ -1577,41 +1640,79 @@ public final class ComparatorProtos { if (comparableBuilder_ != null) { return comparableBuilder_.getMessageOrBuilder(); } else { - return comparable_; + return comparable_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance() : comparable_; } } /** * required .hbase.pb.ByteArrayComparable comparable = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder> getComparableFieldBuilder() { if (comparableBuilder_ == null) { - comparableBuilder_ = new com.google.protobuf.SingleFieldBuilder< + comparableBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder>( - comparable_, + getComparable(), getParentForChildren(), isClean()); comparable_ = null; } return comparableBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.BinaryComparator) } + // @@protoc_insertion_point(class_scope:hbase.pb.BinaryComparator) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryComparator DEFAULT_INSTANCE; static { - defaultInstance = new BinaryComparator(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryComparator(); + } + + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryComparator getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public BinaryComparator parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new BinaryComparator(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryComparator getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.BinaryComparator) } - public interface LongComparatorOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface LongComparatorOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.LongComparator) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.ByteArrayComparable comparable = 1; /** * required .hbase.pb.ByteArrayComparable comparable = 1; */ @@ -1628,36 +1729,27 @@ public final class ComparatorProtos { /** * Protobuf type {@code hbase.pb.LongComparator} */ - public static final class LongComparator extends - com.google.protobuf.GeneratedMessage - implements LongComparatorOrBuilder { + public static final class LongComparator extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.LongComparator) + LongComparatorOrBuilder { // Use LongComparator.newBuilder() to construct. 
- private LongComparator(com.google.protobuf.GeneratedMessage.Builder builder) { + private LongComparator(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private LongComparator(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final LongComparator defaultInstance; - public static LongComparator getDefaultInstance() { - return defaultInstance; - } - - public LongComparator getDefaultInstanceForType() { - return defaultInstance; + private LongComparator() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private LongComparator( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -1695,7 +1787,7 @@ public final class ComparatorProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -1706,30 +1798,14 @@ public final class ComparatorProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_LongComparator_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_LongComparator_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.LongComparator.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.LongComparator.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public LongComparator parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new LongComparator(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.ByteArrayComparable comparable = 1; public static final int COMPARABLE_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_; /** @@ -1742,22 +1818,20 @@ public final class ComparatorProtos { * required .hbase.pb.ByteArrayComparable comparable = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable() { - return comparable_; + return comparable_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance() : comparable_; } /** * required .hbase.pb.ByteArrayComparable comparable = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder() { - return comparable_; + return comparable_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance() : comparable_; } - private void initFields() { - comparable_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasComparable()) { memoizedIsInitialized = 0; @@ -1769,36 +1843,28 @@ public final class ComparatorProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, comparable_); + output.writeMessage(1, getComparable()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, comparable_); + .computeMessageSize(1, getComparable()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -1814,12 +1880,10 @@ public final class ComparatorProtos { result = result && getComparable() .equals(other.getComparable()); } - result = result && - 
getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -1831,7 +1895,7 @@ public final class ComparatorProtos { hash = (37 * hash) + COMPARABLE_FIELD_NUMBER; hash = (53 * hash) + getComparable().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -1859,46 +1923,57 @@ public final class ComparatorProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.LongComparator parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.LongComparator parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.LongComparator parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.LongComparator parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, 
extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.LongComparator parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.LongComparator parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.LongComparator prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -1906,14 +1981,15 @@ public final class ComparatorProtos { * Protobuf type {@code hbase.pb.LongComparator} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.LongComparatorOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.LongComparator) + org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.LongComparatorOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_LongComparator_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_LongComparator_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -1926,23 +2002,20 @@ public final class ComparatorProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getComparableFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public 
Builder clear() { super.clear(); if (comparableBuilder_ == null) { - comparable_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); + comparable_ = null; } else { comparableBuilder_.clear(); } @@ -1950,10 +2023,6 @@ public final class ComparatorProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_LongComparator_descriptor; @@ -1988,6 +2057,32 @@ public final class ComparatorProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.LongComparator) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.LongComparator)other); @@ -2002,13 +2097,13 @@ public final class ComparatorProtos { if (other.hasComparable()) { mergeComparable(other.getComparable()); } - this.mergeUnknownFields(other.getUnknownFields()); 
+ this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasComparable()) { - return false; } return true; @@ -2023,7 +2118,7 @@ public final class ComparatorProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.LongComparator) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -2033,9 +2128,8 @@ public final class ComparatorProtos { } private int bitField0_; - // required .hbase.pb.ByteArrayComparable comparable = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder> comparableBuilder_; /** * required .hbase.pb.ByteArrayComparable comparable = 1; @@ -2048,7 +2142,7 @@ public final class ComparatorProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable() { if (comparableBuilder_ == null) { - return comparable_; + return comparable_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance() : comparable_; } else { return comparableBuilder_.getMessage(); } @@ -2089,6 +2183,7 @@ public final class ComparatorProtos { public Builder mergeComparable(org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable value) { if (comparableBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + comparable_ != null && comparable_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance()) { comparable_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.newBuilder(comparable_).mergeFrom(value).buildPartial(); @@ -2107,7 +2202,7 @@ public final class ComparatorProtos { */ public Builder clearComparable() { if (comparableBuilder_ == null) { - comparable_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); + comparable_ = null; onChanged(); } else { comparableBuilder_.clear(); @@ -2130,41 +2225,79 @@ public final class ComparatorProtos { if (comparableBuilder_ != null) { return comparableBuilder_.getMessageOrBuilder(); } else { - return comparable_; + return comparable_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance() : comparable_; } } /** * required .hbase.pb.ByteArrayComparable comparable = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder> getComparableFieldBuilder() { if (comparableBuilder_ == null) { - comparableBuilder_ = new com.google.protobuf.SingleFieldBuilder< + comparableBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder>( - comparable_, + getComparable(), getParentForChildren(), isClean()); comparable_ = null; } return comparableBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.LongComparator) } + // @@protoc_insertion_point(class_scope:hbase.pb.LongComparator) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.LongComparator DEFAULT_INSTANCE; static { - defaultInstance = new LongComparator(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.LongComparator(); + } + + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.LongComparator getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public LongComparator parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new LongComparator(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.LongComparator getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.LongComparator) } - public interface BinaryPrefixComparatorOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface BinaryPrefixComparatorOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.BinaryPrefixComparator) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.ByteArrayComparable comparable = 1; /** * required .hbase.pb.ByteArrayComparable comparable = 1; */ @@ -2181,36 +2314,27 @@ public final class ComparatorProtos { /** * Protobuf type {@code hbase.pb.BinaryPrefixComparator} */ - public static final class BinaryPrefixComparator extends - com.google.protobuf.GeneratedMessage - implements BinaryPrefixComparatorOrBuilder { + public static final class BinaryPrefixComparator extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.BinaryPrefixComparator) + BinaryPrefixComparatorOrBuilder { // Use BinaryPrefixComparator.newBuilder() to construct. 
- private BinaryPrefixComparator(com.google.protobuf.GeneratedMessage.Builder builder) { + private BinaryPrefixComparator(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private BinaryPrefixComparator(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final BinaryPrefixComparator defaultInstance; - public static BinaryPrefixComparator getDefaultInstance() { - return defaultInstance; } - - public BinaryPrefixComparator getDefaultInstanceForType() { - return defaultInstance; + private BinaryPrefixComparator() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private BinaryPrefixComparator( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -2248,7 +2372,7 @@ public final class ComparatorProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -2259,30 +2383,14 @@ public final class ComparatorProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_BinaryPrefixComparator_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_BinaryPrefixComparator_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryPrefixComparator.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryPrefixComparator.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public BinaryPrefixComparator parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new BinaryPrefixComparator(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.ByteArrayComparable comparable = 1; public static final int COMPARABLE_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_; /** @@ -2295,22 +2403,20 @@ public final class ComparatorProtos { * required .hbase.pb.ByteArrayComparable comparable = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable() { - return comparable_; + return comparable_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance() : comparable_; } /** * required .hbase.pb.ByteArrayComparable comparable = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder() { - return comparable_; + return comparable_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance() : comparable_; } - private void initFields() { - comparable_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasComparable()) { memoizedIsInitialized = 0; @@ -2322,36 +2428,28 @@ public final class ComparatorProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, comparable_); + output.writeMessage(1, getComparable()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, comparable_); + .computeMessageSize(1, getComparable()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -2367,12 +2465,10 @@ public final class ComparatorProtos { result = result && getComparable() .equals(other.getComparable()); } - result = result && - 
getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -2384,7 +2480,7 @@ public final class ComparatorProtos { hash = (37 * hash) + COMPARABLE_FIELD_NUMBER; hash = (53 * hash) + getComparable().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -2412,46 +2508,57 @@ public final class ComparatorProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + 
.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryPrefixComparator prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -2459,14 +2566,15 @@ public final class ComparatorProtos { * Protobuf type {@code hbase.pb.BinaryPrefixComparator} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryPrefixComparatorOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.BinaryPrefixComparator) + org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryPrefixComparatorOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_BinaryPrefixComparator_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_BinaryPrefixComparator_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -2479,23 +2587,20 @@ public final class ComparatorProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getComparableFieldBuilder(); } } - private static Builder 
create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (comparableBuilder_ == null) { - comparable_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); + comparable_ = null; } else { comparableBuilder_.clear(); } @@ -2503,10 +2608,6 @@ public final class ComparatorProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_BinaryPrefixComparator_descriptor; @@ -2541,6 +2642,32 @@ public final class ComparatorProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryPrefixComparator) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryPrefixComparator)other); @@ -2555,13 +2682,13 @@ public final class ComparatorProtos { if (other.hasComparable()) { 
mergeComparable(other.getComparable()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasComparable()) { - return false; } return true; @@ -2576,7 +2703,7 @@ public final class ComparatorProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryPrefixComparator) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -2586,9 +2713,8 @@ public final class ComparatorProtos { } private int bitField0_; - // required .hbase.pb.ByteArrayComparable comparable = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder> comparableBuilder_; /** * required .hbase.pb.ByteArrayComparable comparable = 1; @@ -2601,7 +2727,7 @@ public final class ComparatorProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable() { if (comparableBuilder_ == null) { - return comparable_; + return comparable_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance() : comparable_; } else { return comparableBuilder_.getMessage(); } @@ -2642,6 +2768,7 @@ public final class ComparatorProtos { public Builder mergeComparable(org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable value) { if (comparableBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + comparable_ != null && comparable_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance()) { comparable_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.newBuilder(comparable_).mergeFrom(value).buildPartial(); @@ -2660,7 +2787,7 @@ public final class ComparatorProtos { */ public Builder clearComparable() { if (comparableBuilder_ == null) { - comparable_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); + comparable_ = null; onChanged(); } else { comparableBuilder_.clear(); @@ -2683,41 +2810,79 @@ public final class ComparatorProtos { if (comparableBuilder_ != null) { return comparableBuilder_.getMessageOrBuilder(); } else { - return comparable_; + return comparable_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance() : comparable_; } } /** * required .hbase.pb.ByteArrayComparable comparable = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder> getComparableFieldBuilder() { if (comparableBuilder_ == null) { - comparableBuilder_ = new com.google.protobuf.SingleFieldBuilder< + comparableBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder>( - comparable_, + getComparable(), getParentForChildren(), isClean()); comparable_ = null; } return comparableBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.BinaryPrefixComparator) } + // @@protoc_insertion_point(class_scope:hbase.pb.BinaryPrefixComparator) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryPrefixComparator DEFAULT_INSTANCE; static { - defaultInstance = new BinaryPrefixComparator(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryPrefixComparator(); + } + + 
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryPrefixComparator getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public BinaryPrefixComparator parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new BinaryPrefixComparator(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryPrefixComparator getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.BinaryPrefixComparator) } - public interface BitComparatorOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface BitComparatorOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.BitComparator) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.ByteArrayComparable comparable = 1; /** * required .hbase.pb.ByteArrayComparable comparable = 1; */ @@ -2731,7 +2896,6 @@ public final class ComparatorProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder(); - // required .hbase.pb.BitComparator.BitwiseOp bitwise_op = 2; /** * required .hbase.pb.BitComparator.BitwiseOp bitwise_op = 2; */ @@ -2744,36 +2908,28 @@ public final class ComparatorProtos { /** * Protobuf type {@code hbase.pb.BitComparator} */ - public static final class BitComparator extends - com.google.protobuf.GeneratedMessage - implements BitComparatorOrBuilder { + public static final class BitComparator extends + 
com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.BitComparator) + BitComparatorOrBuilder { // Use BitComparator.newBuilder() to construct. - private BitComparator(com.google.protobuf.GeneratedMessage.Builder builder) { + private BitComparator(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private BitComparator(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final BitComparator defaultInstance; - public static BitComparator getDefaultInstance() { - return defaultInstance; - } - - public BitComparator getDefaultInstanceForType() { - return defaultInstance; + private BitComparator() { + bitwiseOp_ = 1; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private BitComparator( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -2812,7 +2968,7 @@ public final class ComparatorProtos { unknownFields.mergeVarintField(2, rawValue); } else { bitField0_ |= 0x00000002; - bitwiseOp_ = value; + bitwiseOp_ = rawValue; } break; } @@ -2822,7 +2978,7 @@ public final class ComparatorProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -2832,27 +2988,12 @@ public final class ComparatorProtos { 
getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_BitComparator_descriptor; } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_BitComparator_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public BitComparator parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new BitComparator(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_BitComparator_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator.Builder.class); } /** @@ -2863,15 +3004,15 @@ public final class ComparatorProtos { /** * AND = 1; */ - AND(0, 1), + AND(1), /** * OR = 2; */ - OR(1, 2), + OR(2), /** * XOR = 3; */ - XOR(2, 3), + XOR(3), ; /** @@ -2888,9 +3029,19 @@ public final class ComparatorProtos { public static final int XOR_VALUE = 3; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} 
instead. + */ + @java.lang.Deprecated public static BitwiseOp valueOf(int value) { + return forNumber(value); + } + + public static BitwiseOp forNumber(int value) { switch (value) { case 1: return AND; case 2: return OR; @@ -2903,17 +3054,17 @@ public final class ComparatorProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + BitwiseOp> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public BitwiseOp findValueByNumber(int number) { - return BitwiseOp.valueOf(number); + return BitwiseOp.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -2935,11 +3086,9 @@ public final class ComparatorProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private BitwiseOp(int index, int value) { - this.index = index; + private BitwiseOp(int value) { this.value = value; } @@ -2947,7 +3096,6 @@ public final class ComparatorProtos { } private int bitField0_; - // required .hbase.pb.ByteArrayComparable comparable = 1; public static final int COMPARABLE_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_; /** @@ -2960,18 +3108,17 @@ public final class ComparatorProtos { * required .hbase.pb.ByteArrayComparable comparable = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable() { - return comparable_; + return comparable_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance() : comparable_; } /** * required .hbase.pb.ByteArrayComparable comparable = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder() { - return comparable_; + return comparable_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance() : comparable_; } - // required .hbase.pb.BitComparator.BitwiseOp bitwise_op = 2; public static final int BITWISE_OP_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp bitwiseOp_; + private int bitwiseOp_; /** * required .hbase.pb.BitComparator.BitwiseOp bitwise_op = 2; */ @@ -2982,17 +3129,15 @@ public final class ComparatorProtos { * required .hbase.pb.BitComparator.BitwiseOp bitwise_op = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp getBitwiseOp() { - return bitwiseOp_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp result = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp.valueOf(bitwiseOp_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp.AND : result; } - private void initFields() { - comparable_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); - bitwiseOp_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp.AND; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasComparable()) { memoizedIsInitialized = 0; @@ -3008,43 +3153,35 @@ public final class ComparatorProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, comparable_); + output.writeMessage(1, getComparable()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeEnum(2, bitwiseOp_.getNumber()); + output.writeEnum(2, bitwiseOp_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, comparable_); + .computeMessageSize(1, getComparable()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(2, bitwiseOp_.getNumber()); + .computeEnumSize(2, bitwiseOp_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected 
java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -3062,15 +3199,12 @@ public final class ComparatorProtos { } result = result && (hasBitwiseOp() == other.hasBitwiseOp()); if (hasBitwiseOp()) { - result = result && - (getBitwiseOp() == other.getBitwiseOp()); + result = result && bitwiseOp_ == other.bitwiseOp_; } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -3084,9 +3218,9 @@ public final class ComparatorProtos { } if (hasBitwiseOp()) { hash = (37 * hash) + BITWISE_OP_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getBitwiseOp()); + hash = (53 * hash) + bitwiseOp_; } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -3114,46 +3248,57 @@ public final class ComparatorProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator parseDelimitedFrom(java.io.InputStream input) throws 
java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -3161,14 +3306,15 @@ public final class ComparatorProtos { * Protobuf type {@code hbase.pb.BitComparator} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparatorOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.BitComparator) + org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparatorOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_BitComparator_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_BitComparator_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -3181,36 +3327,29 @@ public final class ComparatorProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getComparableFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder 
clear() { super.clear(); if (comparableBuilder_ == null) { - comparable_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); + comparable_ = null; } else { comparableBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); - bitwiseOp_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp.AND; + bitwiseOp_ = 1; bitField0_ = (bitField0_ & ~0x00000002); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_BitComparator_descriptor; @@ -3249,6 +3388,32 @@ public final class ComparatorProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator)other); @@ -3266,17 +3431,16 @@ public final class 
ComparatorProtos { if (other.hasBitwiseOp()) { setBitwiseOp(other.getBitwiseOp()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasComparable()) { - return false; } if (!hasBitwiseOp()) { - return false; } return true; @@ -3291,7 +3455,7 @@ public final class ComparatorProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -3301,9 +3465,8 @@ public final class ComparatorProtos { } private int bitField0_; - // required .hbase.pb.ByteArrayComparable comparable = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder> comparableBuilder_; /** * required .hbase.pb.ByteArrayComparable comparable = 1; @@ -3316,7 +3479,7 @@ public final class ComparatorProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable() { if (comparableBuilder_ == null) { - return comparable_; + return comparable_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance() : comparable_; } else { return comparableBuilder_.getMessage(); } @@ -3357,6 +3520,7 @@ public final class ComparatorProtos { public Builder mergeComparable(org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable value) { if (comparableBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + comparable_ != null && comparable_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance()) { comparable_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.newBuilder(comparable_).mergeFrom(value).buildPartial(); @@ -3375,7 +3539,7 @@ public final class ComparatorProtos { */ public Builder clearComparable() { if (comparableBuilder_ == null) { - comparable_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); + comparable_ = null; onChanged(); } else { comparableBuilder_.clear(); @@ -3398,19 +3562,20 @@ public final class ComparatorProtos { if (comparableBuilder_ != null) { return comparableBuilder_.getMessageOrBuilder(); } else { - return comparable_; + return comparable_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance() : comparable_; } } /** * required .hbase.pb.ByteArrayComparable comparable = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder> getComparableFieldBuilder() { if (comparableBuilder_ == null) { - comparableBuilder_ = new com.google.protobuf.SingleFieldBuilder< + comparableBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder>( - comparable_, + getComparable(), getParentForChildren(), isClean()); comparable_ = null; @@ -3418,8 +3583,7 @@ public final class ComparatorProtos { return comparableBuilder_; } - // required .hbase.pb.BitComparator.BitwiseOp bitwise_op = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp bitwiseOp_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp.AND; + private int bitwiseOp_ = 1; /** * required .hbase.pb.BitComparator.BitwiseOp bitwise_op = 2; */ @@ -3430,7 +3594,8 @@ public final class ComparatorProtos { * required .hbase.pb.BitComparator.BitwiseOp bitwise_op = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp getBitwiseOp() { - return bitwiseOp_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp result = 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp.valueOf(bitwiseOp_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp.AND : result; } /** * required .hbase.pb.BitComparator.BitwiseOp bitwise_op = 2; @@ -3440,7 +3605,7 @@ public final class ComparatorProtos { throw new NullPointerException(); } bitField0_ |= 0x00000002; - bitwiseOp_ = value; + bitwiseOp_ = value.getNumber(); onChanged(); return this; } @@ -3449,58 +3614,87 @@ public final class ComparatorProtos { */ public Builder clearBitwiseOp() { bitField0_ = (bitField0_ & ~0x00000002); - bitwiseOp_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator.BitwiseOp.AND; + bitwiseOp_ = 1; onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.BitComparator) } + // @@protoc_insertion_point(class_scope:hbase.pb.BitComparator) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator DEFAULT_INSTANCE; static { - defaultInstance = new BitComparator(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public BitComparator parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new BitComparator(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.BitComparator) } - public interface NullComparatorOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface NullComparatorOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.NullComparator) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.NullComparator} */ - public static final class NullComparator extends - com.google.protobuf.GeneratedMessage - implements NullComparatorOrBuilder { + public static final class NullComparator extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.NullComparator) + NullComparatorOrBuilder { // Use NullComparator.newBuilder() to construct. 
- private NullComparator(com.google.protobuf.GeneratedMessage.Builder builder) { + private NullComparator(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private NullComparator(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final NullComparator defaultInstance; - public static NullComparator getDefaultInstance() { - return defaultInstance; - } - - public NullComparator getDefaultInstanceForType() { - return defaultInstance; + private NullComparator() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private NullComparator( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -3524,7 +3718,7 @@ public final class ComparatorProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -3535,34 +3729,18 @@ public final class ComparatorProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_NullComparator_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_NullComparator_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.NullComparator.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.NullComparator.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public NullComparator parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new NullComparator(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -3570,29 +3748,21 @@ public final class ComparatorProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final 
java.lang.Object obj) { if (obj == this) { return true; @@ -3603,12 +3773,10 @@ public final class ComparatorProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.NullComparator other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.NullComparator) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -3616,7 +3784,7 @@ public final class ComparatorProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -3644,46 +3812,57 @@ public final class ComparatorProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.NullComparator parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.NullComparator parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.NullComparator parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.NullComparator parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.NullComparator parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.NullComparator parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.NullComparator prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -3691,14 +3870,15 @@ public final class ComparatorProtos { * Protobuf type {@code hbase.pb.NullComparator} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.NullComparatorOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.NullComparator) + org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.NullComparatorOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_NullComparator_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_NullComparator_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -3711,27 +3891,20 @@ public final class ComparatorProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); 
return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_NullComparator_descriptor; @@ -3755,6 +3928,32 @@ public final class ComparatorProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.NullComparator) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.NullComparator)other); @@ -3766,7 +3965,8 @@ public final class ComparatorProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.NullComparator other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.NullComparator.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -3783,7 +3983,7 @@ public final class ComparatorProtos { 
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.NullComparator) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -3791,22 +3991,59 @@ public final class ComparatorProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.NullComparator) } + // @@protoc_insertion_point(class_scope:hbase.pb.NullComparator) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.NullComparator DEFAULT_INSTANCE; static { - defaultInstance = new NullComparator(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.NullComparator(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.NullComparator getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public NullComparator parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new NullComparator(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.NullComparator getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.NullComparator) } - public interface RegexStringComparatorOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RegexStringComparatorOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RegexStringComparator) + com.google.protobuf.MessageOrBuilder { - // required string pattern = 1; /** * required string pattern = 1; */ @@ -3821,7 +4058,6 @@ public final class ComparatorProtos { com.google.protobuf.ByteString getPatternBytes(); - // required int32 pattern_flags = 2; /** * required int32 pattern_flags = 2; */ @@ -3831,7 +4067,6 @@ public final class ComparatorProtos { */ int getPatternFlags(); - // required string charset = 3; /** * required string charset = 3; */ @@ -3846,7 +4081,6 @@ public final class ComparatorProtos { com.google.protobuf.ByteString getCharsetBytes(); - // optional string engine = 4; /** * optional string engine = 4; */ @@ -3864,36 +4098,31 @@ public final class ComparatorProtos { /** * Protobuf type {@code hbase.pb.RegexStringComparator} */ - public static final class RegexStringComparator extends - com.google.protobuf.GeneratedMessage - implements RegexStringComparatorOrBuilder { + public static final class RegexStringComparator extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RegexStringComparator) + RegexStringComparatorOrBuilder { // Use RegexStringComparator.newBuilder() to construct. 
- private RegexStringComparator(com.google.protobuf.GeneratedMessage.Builder builder) { + private RegexStringComparator(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private RegexStringComparator(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RegexStringComparator defaultInstance; - public static RegexStringComparator getDefaultInstance() { - return defaultInstance; } - - public RegexStringComparator getDefaultInstanceForType() { - return defaultInstance; + private RegexStringComparator() { + pattern_ = ""; + patternFlags_ = 0; + charset_ = ""; + engine_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RegexStringComparator( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -3913,8 +4142,9 @@ public final class ComparatorProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - pattern_ = input.readBytes(); + pattern_ = bs; break; } case 16: { @@ -3923,13 +4153,15 @@ public final class ComparatorProtos { break; } case 26: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000004; - charset_ = input.readBytes(); + charset_ = bs; break; } case 34: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000008; - engine_ = input.readBytes(); + engine_ = bs; break; } } @@ -3938,7 +4170,7 @@ public final class ComparatorProtos { throw 
e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -3949,32 +4181,16 @@ public final class ComparatorProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_RegexStringComparator_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_RegexStringComparator_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.RegexStringComparator.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.RegexStringComparator.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RegexStringComparator parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RegexStringComparator(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string pattern = 1; public static final int PATTERN_FIELD_NUMBER = 1; - private java.lang.Object pattern_; + private volatile java.lang.Object pattern_; /** * required string pattern = 1; */ @@ -4015,7 +4231,6 @@ public final class ComparatorProtos { } } - // required int32 pattern_flags = 2; public static final int PATTERN_FLAGS_FIELD_NUMBER = 2; private int patternFlags_; /** @@ -4031,9 +4246,8 @@ public final class 
ComparatorProtos { return patternFlags_; } - // required string charset = 3; public static final int CHARSET_FIELD_NUMBER = 3; - private java.lang.Object charset_; + private volatile java.lang.Object charset_; /** * required string charset = 3; */ @@ -4074,9 +4288,8 @@ public final class ComparatorProtos { } } - // optional string engine = 4; public static final int ENGINE_FIELD_NUMBER = 4; - private java.lang.Object engine_; + private volatile java.lang.Object engine_; /** * optional string engine = 4; */ @@ -4117,16 +4330,11 @@ public final class ComparatorProtos { } } - private void initFields() { - pattern_ = ""; - patternFlags_ = 0; - charset_ = ""; - engine_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasPattern()) { memoizedIsInitialized = 0; @@ -4146,57 +4354,46 @@ public final class ComparatorProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getPatternBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, pattern_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeInt32(2, patternFlags_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, getCharsetBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, charset_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeBytes(4, getEngineBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, engine_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return 
size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getPatternBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, pattern_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(2, patternFlags_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, getCharsetBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, charset_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(4, getEngineBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, engine_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -4227,12 +4424,10 @@ public final class ComparatorProtos { result = result && getEngine() .equals(other.getEngine()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -4256,7 +4451,7 @@ public final class ComparatorProtos { hash = (37 * hash) + ENGINE_FIELD_NUMBER; hash = (53 * hash) + getEngine().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -4284,46 +4479,57 @@ public final 
class ComparatorProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.RegexStringComparator parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.RegexStringComparator parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.RegexStringComparator prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -4331,14 +4537,15 @@ public final class ComparatorProtos { * Protobuf type {@code hbase.pb.RegexStringComparator} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.RegexStringComparatorOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RegexStringComparator) + org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.RegexStringComparatorOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_RegexStringComparator_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_RegexStringComparator_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -4351,18 +4558,15 @@ public final class ComparatorProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); pattern_ = ""; @@ -4376,10 +4580,6 @@ public final class ComparatorProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_RegexStringComparator_descriptor; @@ -4422,6 +4622,32 @@ public final class ComparatorProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, 
+ Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.RegexStringComparator) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.RegexStringComparator)other); @@ -4451,21 +4677,19 @@ public final class ComparatorProtos { engine_ = other.engine_; onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasPattern()) { - return false; } if (!hasPatternFlags()) { - return false; } if (!hasCharset()) { - return false; } return true; @@ -4480,7 +4704,7 @@ public final class ComparatorProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.RegexStringComparator) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -4490,7 +4714,6 @@ public final class ComparatorProtos { } private int bitField0_; - // required string pattern = 1; private java.lang.Object pattern_ = ""; /** * required string pattern = 1; @@ -4504,9 +4727,12 @@ public final class ComparatorProtos { public java.lang.String getPattern() { java.lang.Object ref = pattern_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - pattern_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + pattern_ = s; + } return s; } else { return (java.lang.String) ref; @@ -4564,7 +4790,6 @@ public final class ComparatorProtos { return this; } - // required int32 
pattern_flags = 2; private int patternFlags_ ; /** * required int32 pattern_flags = 2; @@ -4597,7 +4822,6 @@ public final class ComparatorProtos { return this; } - // required string charset = 3; private java.lang.Object charset_ = ""; /** * required string charset = 3; @@ -4611,9 +4835,12 @@ public final class ComparatorProtos { public java.lang.String getCharset() { java.lang.Object ref = charset_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - charset_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + charset_ = s; + } return s; } else { return (java.lang.String) ref; @@ -4671,7 +4898,6 @@ public final class ComparatorProtos { return this; } - // optional string engine = 4; private java.lang.Object engine_ = ""; /** * optional string engine = 4; @@ -4685,9 +4911,12 @@ public final class ComparatorProtos { public java.lang.String getEngine() { java.lang.Object ref = engine_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - engine_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + engine_ = s; + } return s; } else { return (java.lang.String) ref; @@ -4744,22 +4973,59 @@ public final class ComparatorProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RegexStringComparator) } + // @@protoc_insertion_point(class_scope:hbase.pb.RegexStringComparator) + private static 
final org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.RegexStringComparator DEFAULT_INSTANCE; static { - defaultInstance = new RegexStringComparator(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.RegexStringComparator(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.RegexStringComparator getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RegexStringComparator parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegexStringComparator(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.RegexStringComparator getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.RegexStringComparator) } - public interface SubstringComparatorOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface SubstringComparatorOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SubstringComparator) + com.google.protobuf.MessageOrBuilder { - // required string substr = 1; /** * required string substr = 1; */ @@ -4777,36 +5043,28 @@ public final class ComparatorProtos { /** * Protobuf type {@code hbase.pb.SubstringComparator} */ - public static final class SubstringComparator extends - com.google.protobuf.GeneratedMessage - implements SubstringComparatorOrBuilder { + public static final class SubstringComparator extends + 
com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.SubstringComparator) + SubstringComparatorOrBuilder { // Use SubstringComparator.newBuilder() to construct. - private SubstringComparator(com.google.protobuf.GeneratedMessage.Builder builder) { + private SubstringComparator(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private SubstringComparator(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SubstringComparator defaultInstance; - public static SubstringComparator getDefaultInstance() { - return defaultInstance; } - - public SubstringComparator getDefaultInstanceForType() { - return defaultInstance; + private SubstringComparator() { + substr_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private SubstringComparator( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -4826,8 +5084,9 @@ public final class ComparatorProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - substr_ = input.readBytes(); + substr_ = bs; break; } } @@ -4836,7 +5095,7 @@ public final class ComparatorProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); 
makeExtensionsImmutable(); @@ -4847,32 +5106,16 @@ public final class ComparatorProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_SubstringComparator_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_SubstringComparator_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.SubstringComparator.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.SubstringComparator.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SubstringComparator parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SubstringComparator(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string substr = 1; public static final int SUBSTR_FIELD_NUMBER = 1; - private java.lang.Object substr_; + private volatile java.lang.Object substr_; /** * required string substr = 1; */ @@ -4913,13 +5156,11 @@ public final class ComparatorProtos { } } - private void initFields() { - substr_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasSubstr()) { memoizedIsInitialized = 0; @@ -4931,36 +5172,27 @@ public final class ComparatorProtos { public void 
writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getSubstrBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, substr_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getSubstrBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, substr_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -4976,12 +5208,10 @@ public final class ComparatorProtos { result = result && getSubstr() .equals(other.getSubstr()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -4993,7 +5223,7 @@ public final class ComparatorProtos { hash = (37 * hash) + SUBSTR_FIELD_NUMBER; hash = (53 * hash) + getSubstr().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -5021,46 +5251,57 @@ public final class ComparatorProtos { } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.SubstringComparator parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.SubstringComparator parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, 
extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.SubstringComparator prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -5068,14 +5309,15 @@ public final class ComparatorProtos { * Protobuf type {@code hbase.pb.SubstringComparator} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.SubstringComparatorOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SubstringComparator) + org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.SubstringComparatorOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_SubstringComparator_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_SubstringComparator_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -5088,18 +5330,15 @@ public final class ComparatorProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); substr_ = ""; @@ -5107,10 +5346,6 @@ public final class ComparatorProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.internal_static_hbase_pb_SubstringComparator_descriptor; @@ -5141,6 +5376,32 @@ public final class ComparatorProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) 
super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.SubstringComparator) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.SubstringComparator)other); @@ -5157,13 +5418,13 @@ public final class ComparatorProtos { substr_ = other.substr_; onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasSubstr()) { - return false; } return true; @@ -5178,7 +5439,7 @@ public final class ComparatorProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.SubstringComparator) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -5188,7 +5449,6 @@ public final class ComparatorProtos { } private int bitField0_; - // required string substr = 1; private java.lang.Object substr_ = ""; /** * required string substr = 1; @@ -5202,9 +5462,12 @@ public final class ComparatorProtos { public java.lang.String getSubstr() { java.lang.Object ref = substr_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - substr_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + substr_ = s; + } return s; } else { return (java.lang.String) ref; @@ -5261,69 +5524,106 @@ public final class ComparatorProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); 
+ } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.SubstringComparator) } + // @@protoc_insertion_point(class_scope:hbase.pb.SubstringComparator) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.SubstringComparator DEFAULT_INSTANCE; static { - defaultInstance = new SubstringComparator(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.SubstringComparator(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.SubstringComparator getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public SubstringComparator parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SubstringComparator(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.SubstringComparator getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.SubstringComparator) } - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_Comparator_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internal_static_hbase_pb_Comparator_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ByteArrayComparable_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ByteArrayComparable_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_BinaryComparator_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_BinaryComparator_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_LongComparator_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_LongComparator_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_BinaryPrefixComparator_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_BinaryPrefixComparator_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_BitComparator_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_BitComparator_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_NullComparator_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_NullComparator_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RegexStringComparator_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RegexStringComparator_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SubstringComparator_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SubstringComparator_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { @@ -5348,71 +5648,71 @@ public final class ComparatorProtos { "ratorProtosH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - 
com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_hbase_pb_Comparator_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_hbase_pb_Comparator_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_Comparator_descriptor, - new java.lang.String[] { "Name", "SerializedComparator", }); - internal_static_hbase_pb_ByteArrayComparable_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_hbase_pb_ByteArrayComparable_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ByteArrayComparable_descriptor, - new java.lang.String[] { "Value", }); - internal_static_hbase_pb_BinaryComparator_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_hbase_pb_BinaryComparator_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_BinaryComparator_descriptor, - new java.lang.String[] { "Comparable", }); - internal_static_hbase_pb_LongComparator_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_hbase_pb_LongComparator_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_LongComparator_descriptor, - new java.lang.String[] { "Comparable", }); - internal_static_hbase_pb_BinaryPrefixComparator_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_hbase_pb_BinaryPrefixComparator_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_BinaryPrefixComparator_descriptor, - new java.lang.String[] { "Comparable", }); - internal_static_hbase_pb_BitComparator_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_hbase_pb_BitComparator_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - 
internal_static_hbase_pb_BitComparator_descriptor, - new java.lang.String[] { "Comparable", "BitwiseOp", }); - internal_static_hbase_pb_NullComparator_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_hbase_pb_NullComparator_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_NullComparator_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_RegexStringComparator_descriptor = - getDescriptor().getMessageTypes().get(7); - internal_static_hbase_pb_RegexStringComparator_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RegexStringComparator_descriptor, - new java.lang.String[] { "Pattern", "PatternFlags", "Charset", "Engine", }); - internal_static_hbase_pb_SubstringComparator_descriptor = - getDescriptor().getMessageTypes().get(8); - internal_static_hbase_pb_SubstringComparator_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_SubstringComparator_descriptor, - new java.lang.String[] { "Substr", }); - return null; - } - }; + new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); + internal_static_hbase_pb_Comparator_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_hbase_pb_Comparator_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_Comparator_descriptor, + new java.lang.String[] { "Name", "SerializedComparator", }); + internal_static_hbase_pb_ByteArrayComparable_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_hbase_pb_ByteArrayComparable_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ByteArrayComparable_descriptor, + new java.lang.String[] { "Value", }); + internal_static_hbase_pb_BinaryComparator_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_hbase_pb_BinaryComparator_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_BinaryComparator_descriptor, + new java.lang.String[] { "Comparable", }); + internal_static_hbase_pb_LongComparator_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_hbase_pb_LongComparator_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_LongComparator_descriptor, + new java.lang.String[] { "Comparable", }); + internal_static_hbase_pb_BinaryPrefixComparator_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_hbase_pb_BinaryPrefixComparator_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_BinaryPrefixComparator_descriptor, + new 
java.lang.String[] { "Comparable", }); + internal_static_hbase_pb_BitComparator_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_hbase_pb_BitComparator_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_BitComparator_descriptor, + new java.lang.String[] { "Comparable", "BitwiseOp", }); + internal_static_hbase_pb_NullComparator_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_hbase_pb_NullComparator_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_NullComparator_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_RegexStringComparator_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_hbase_pb_RegexStringComparator_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_RegexStringComparator_descriptor, + new java.lang.String[] { "Pattern", "PatternFlags", "Charset", "Engine", }); + internal_static_hbase_pb_SubstringComparator_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_hbase_pb_SubstringComparator_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_SubstringComparator_descriptor, + new java.lang.String[] { "Substr", }); } // @@protoc_insertion_point(outer_class_scope) diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/EncryptionProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/EncryptionProtos.java index dca6ea2..689d33f 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/EncryptionProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/EncryptionProtos.java @@ -6,12 +6,18 @@ package 
org.apache.hadoop.hbase.shaded.protobuf.generated; public final class EncryptionProtos { private EncryptionProtos() {} public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); } - public interface WrappedKeyOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface WrappedKeyOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.WrappedKey) + com.google.protobuf.MessageOrBuilder { - // required string algorithm = 1; /** * required string algorithm = 1; */ @@ -26,7 +32,6 @@ public final class EncryptionProtos { com.google.protobuf.ByteString getAlgorithmBytes(); - // required uint32 length = 2; /** * required uint32 length = 2; */ @@ -36,7 +41,6 @@ public final class EncryptionProtos { */ int getLength(); - // required bytes data = 3; /** * required bytes data = 3; */ @@ -46,7 +50,6 @@ public final class EncryptionProtos { */ com.google.protobuf.ByteString getData(); - // optional bytes iv = 4; /** * optional bytes iv = 4; */ @@ -56,7 +59,6 @@ public final class EncryptionProtos { */ com.google.protobuf.ByteString getIv(); - // optional bytes hash = 5; /** * optional bytes hash = 5; */ @@ -69,36 +71,32 @@ public final class EncryptionProtos { /** * Protobuf type {@code hbase.pb.WrappedKey} */ - public static final class WrappedKey extends - com.google.protobuf.GeneratedMessage - implements WrappedKeyOrBuilder { + public static final class WrappedKey extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.WrappedKey) + WrappedKeyOrBuilder { // Use WrappedKey.newBuilder() to construct. 
- private WrappedKey(com.google.protobuf.GeneratedMessage.Builder builder) { + private WrappedKey(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private WrappedKey(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final WrappedKey defaultInstance; - public static WrappedKey getDefaultInstance() { - return defaultInstance; } - - public WrappedKey getDefaultInstanceForType() { - return defaultInstance; + private WrappedKey() { + algorithm_ = ""; + length_ = 0; + data_ = com.google.protobuf.ByteString.EMPTY; + iv_ = com.google.protobuf.ByteString.EMPTY; + hash_ = com.google.protobuf.ByteString.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private WrappedKey( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -118,8 +116,9 @@ public final class EncryptionProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - algorithm_ = input.readBytes(); + algorithm_ = bs; break; } case 16: { @@ -148,7 +147,7 @@ public final class EncryptionProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -159,32 +158,16 @@ public final class EncryptionProtos { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.EncryptionProtos.internal_static_hbase_pb_WrappedKey_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.EncryptionProtos.internal_static_hbase_pb_WrappedKey_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.EncryptionProtos.WrappedKey.class, org.apache.hadoop.hbase.shaded.protobuf.generated.EncryptionProtos.WrappedKey.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public WrappedKey parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new WrappedKey(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string algorithm = 1; public static final int ALGORITHM_FIELD_NUMBER = 1; - private java.lang.Object algorithm_; + private volatile java.lang.Object algorithm_; /** * required string algorithm = 1; */ @@ -225,7 +208,6 @@ public final class EncryptionProtos { } } - // required uint32 length = 2; public static final int LENGTH_FIELD_NUMBER = 2; private int length_; /** @@ -241,7 +223,6 @@ public final class EncryptionProtos { return length_; } - // required bytes data = 3; public static final int DATA_FIELD_NUMBER = 3; private com.google.protobuf.ByteString data_; /** @@ -257,7 +238,6 @@ public final class EncryptionProtos { return data_; } - // optional bytes iv = 4; public static final int IV_FIELD_NUMBER = 4; private com.google.protobuf.ByteString iv_; /** @@ -273,7 +253,6 @@ public final class EncryptionProtos { return iv_; } - // optional 
bytes hash = 5; public static final int HASH_FIELD_NUMBER = 5; private com.google.protobuf.ByteString hash_; /** @@ -289,17 +268,11 @@ public final class EncryptionProtos { return hash_; } - private void initFields() { - algorithm_ = ""; - length_ = 0; - data_ = com.google.protobuf.ByteString.EMPTY; - iv_ = com.google.protobuf.ByteString.EMPTY; - hash_ = com.google.protobuf.ByteString.EMPTY; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasAlgorithm()) { memoizedIsInitialized = 0; @@ -319,9 +292,8 @@ public final class EncryptionProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getAlgorithmBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, algorithm_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt32(2, length_); @@ -335,18 +307,16 @@ public final class EncryptionProtos { if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBytes(5, hash_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getAlgorithmBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, algorithm_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream @@ -364,19 +334,13 @@ public final class EncryptionProtos { size += com.google.protobuf.CodedOutputStream .computeBytesSize(5, hash_); } - size += 
getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -412,12 +376,10 @@ public final class EncryptionProtos { result = result && getHash() .equals(other.getHash()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -445,7 +407,7 @@ public final class EncryptionProtos { hash = (37 * hash) + HASH_FIELD_NUMBER; hash = (53 * hash) + getHash().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -473,46 +435,57 @@ public final class EncryptionProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.EncryptionProtos.WrappedKey parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.EncryptionProtos.WrappedKey parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.EncryptionProtos.WrappedKey parseDelimitedFrom(java.io.InputStream input) throws 
java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.EncryptionProtos.WrappedKey parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.EncryptionProtos.WrappedKey parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.EncryptionProtos.WrappedKey parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.EncryptionProtos.WrappedKey prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -520,14 +493,15 @@ public final class EncryptionProtos { * Protobuf type {@code hbase.pb.WrappedKey} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.EncryptionProtos.WrappedKeyOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.WrappedKey) + org.apache.hadoop.hbase.shaded.protobuf.generated.EncryptionProtos.WrappedKeyOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.EncryptionProtos.internal_static_hbase_pb_WrappedKey_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.EncryptionProtos.internal_static_hbase_pb_WrappedKey_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -540,18 +514,15 @@ public final class EncryptionProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); algorithm_ = ""; @@ -567,10 
+538,6 @@ public final class EncryptionProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.EncryptionProtos.internal_static_hbase_pb_WrappedKey_descriptor; @@ -617,6 +584,32 @@ public final class EncryptionProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.EncryptionProtos.WrappedKey) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.EncryptionProtos.WrappedKey)other); @@ -645,21 +638,19 @@ public final class EncryptionProtos { if (other.hasHash()) { setHash(other.getHash()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasAlgorithm()) { - return false; } if (!hasLength()) { - return false; } if (!hasData()) { - return false; } return true; @@ -674,7 +665,7 @@ public final class EncryptionProtos 
{ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.EncryptionProtos.WrappedKey) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -684,7 +675,6 @@ public final class EncryptionProtos { } private int bitField0_; - // required string algorithm = 1; private java.lang.Object algorithm_ = ""; /** * required string algorithm = 1; @@ -698,9 +688,12 @@ public final class EncryptionProtos { public java.lang.String getAlgorithm() { java.lang.Object ref = algorithm_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - algorithm_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + algorithm_ = s; + } return s; } else { return (java.lang.String) ref; @@ -758,7 +751,6 @@ public final class EncryptionProtos { return this; } - // required uint32 length = 2; private int length_ ; /** * required uint32 length = 2; @@ -791,7 +783,6 @@ public final class EncryptionProtos { return this; } - // required bytes data = 3; private com.google.protobuf.ByteString data_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes data = 3; @@ -827,7 +818,6 @@ public final class EncryptionProtos { return this; } - // optional bytes iv = 4; private com.google.protobuf.ByteString iv_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes iv = 4; @@ -863,7 +853,6 @@ public final class EncryptionProtos { return this; } - // optional bytes hash = 5; private com.google.protobuf.ByteString hash_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes hash = 5; @@ -898,29 +887,66 @@ public final class EncryptionProtos { onChanged(); return this; } + public final Builder setUnknownFields( 
+ final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.WrappedKey) } + // @@protoc_insertion_point(class_scope:hbase.pb.WrappedKey) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.EncryptionProtos.WrappedKey DEFAULT_INSTANCE; static { - defaultInstance = new WrappedKey(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.EncryptionProtos.WrappedKey(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.EncryptionProtos.WrappedKey getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public WrappedKey parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new WrappedKey(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.EncryptionProtos.WrappedKey getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.WrappedKey) } - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_WrappedKey_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internal_static_hbase_pb_WrappedKey_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { @@ -931,23 +957,23 @@ public final class EncryptionProtos { ".generatedB\020EncryptionProtosH\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_hbase_pb_WrappedKey_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_hbase_pb_WrappedKey_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_WrappedKey_descriptor, - new java.lang.String[] { "Algorithm", "Length", "Data", "Iv", "Hash", }); - return null; - } - }; + new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); + internal_static_hbase_pb_WrappedKey_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_hbase_pb_WrappedKey_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_WrappedKey_descriptor, + new java.lang.String[] { "Algorithm", "Length", "Data", "Iv", "Hash", }); } // @@protoc_insertion_point(outer_class_scope) diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ErrorHandlingProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ErrorHandlingProtos.java index 16e6b11..d696e90 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ErrorHandlingProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ErrorHandlingProtos.java @@ -6,12 +6,18 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated; public final class ErrorHandlingProtos { private ErrorHandlingProtos() {} public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); } - public interface StackTraceElementMessageOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface StackTraceElementMessageOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.StackTraceElementMessage) + com.google.protobuf.MessageOrBuilder { - // optional string 
declaring_class = 1; /** * optional string declaring_class = 1; */ @@ -26,7 +32,6 @@ public final class ErrorHandlingProtos { com.google.protobuf.ByteString getDeclaringClassBytes(); - // optional string method_name = 2; /** * optional string method_name = 2; */ @@ -41,7 +46,6 @@ public final class ErrorHandlingProtos { com.google.protobuf.ByteString getMethodNameBytes(); - // optional string file_name = 3; /** * optional string file_name = 3; */ @@ -56,7 +60,6 @@ public final class ErrorHandlingProtos { com.google.protobuf.ByteString getFileNameBytes(); - // optional int32 line_number = 4; /** * optional int32 line_number = 4; */ @@ -67,44 +70,39 @@ public final class ErrorHandlingProtos { int getLineNumber(); } /** - * Protobuf type {@code hbase.pb.StackTraceElementMessage} - * *
    **
    * Protobuf version of a java.lang.StackTraceElement
    * so we can serialize exceptions.
    * 
+ * + * Protobuf type {@code hbase.pb.StackTraceElementMessage} */ - public static final class StackTraceElementMessage extends - com.google.protobuf.GeneratedMessage - implements StackTraceElementMessageOrBuilder { + public static final class StackTraceElementMessage extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.StackTraceElementMessage) + StackTraceElementMessageOrBuilder { // Use StackTraceElementMessage.newBuilder() to construct. - private StackTraceElementMessage(com.google.protobuf.GeneratedMessage.Builder builder) { + private StackTraceElementMessage(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private StackTraceElementMessage(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final StackTraceElementMessage defaultInstance; - public static StackTraceElementMessage getDefaultInstance() { - return defaultInstance; } - - public StackTraceElementMessage getDefaultInstanceForType() { - return defaultInstance; + private StackTraceElementMessage() { + declaringClass_ = ""; + methodName_ = ""; + fileName_ = ""; + lineNumber_ = 0; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private StackTraceElementMessage( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -124,18 +122,21 @@ public final class ErrorHandlingProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 
0x00000001; - declaringClass_ = input.readBytes(); + declaringClass_ = bs; break; } case 18: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; - methodName_ = input.readBytes(); + methodName_ = bs; break; } case 26: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000004; - fileName_ = input.readBytes(); + fileName_ = bs; break; } case 32: { @@ -149,7 +150,7 @@ public final class ErrorHandlingProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -160,32 +161,16 @@ public final class ErrorHandlingProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_StackTraceElementMessage_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_StackTraceElementMessage_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public StackTraceElementMessage parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new StackTraceElementMessage(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser 
getParserForType() { - return PARSER; - } - private int bitField0_; - // optional string declaring_class = 1; public static final int DECLARING_CLASS_FIELD_NUMBER = 1; - private java.lang.Object declaringClass_; + private volatile java.lang.Object declaringClass_; /** * optional string declaring_class = 1; */ @@ -226,9 +211,8 @@ public final class ErrorHandlingProtos { } } - // optional string method_name = 2; public static final int METHOD_NAME_FIELD_NUMBER = 2; - private java.lang.Object methodName_; + private volatile java.lang.Object methodName_; /** * optional string method_name = 2; */ @@ -269,9 +253,8 @@ public final class ErrorHandlingProtos { } } - // optional string file_name = 3; public static final int FILE_NAME_FIELD_NUMBER = 3; - private java.lang.Object fileName_; + private volatile java.lang.Object fileName_; /** * optional string file_name = 3; */ @@ -312,7 +295,6 @@ public final class ErrorHandlingProtos { } } - // optional int32 line_number = 4; public static final int LINE_NUMBER_FIELD_NUMBER = 4; private int lineNumber_; /** @@ -328,16 +310,11 @@ public final class ErrorHandlingProtos { return lineNumber_; } - private void initFields() { - declaringClass_ = ""; - methodName_ = ""; - fileName_ = ""; - lineNumber_ = 0; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -345,57 +322,46 @@ public final class ErrorHandlingProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getDeclaringClassBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, declaringClass_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, 
getMethodNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, methodName_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, getFileNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, fileName_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeInt32(4, lineNumber_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getDeclaringClassBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, declaringClass_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getMethodNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, methodName_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, getFileNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, fileName_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(4, lineNumber_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -426,12 +392,10 @@ public final class ErrorHandlingProtos { result = result && (getLineNumber() == 
other.getLineNumber()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -455,7 +419,7 @@ public final class ErrorHandlingProtos { hash = (37 * hash) + LINE_NUMBER_FIELD_NUMBER; hash = (53 * hash) + getLineNumber(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -483,67 +447,79 @@ public final class ErrorHandlingProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return 
com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.StackTraceElementMessage} - * *
      **
      * Protobuf version of a java.lang.StackTraceElement
      * so we can serialize exceptions.
      * 
+ * + * Protobuf type {@code hbase.pb.StackTraceElementMessage} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessageOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.StackTraceElementMessage) + org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessageOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_StackTraceElementMessage_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_StackTraceElementMessage_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -556,18 +532,15 @@ public final class ErrorHandlingProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); declaringClass_ = ""; @@ -581,10 +554,6 @@ public final class ErrorHandlingProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_StackTraceElementMessage_descriptor; @@ -627,6 +596,32 @@ public final class ErrorHandlingProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage)other); @@ -656,7 +651,8 @@ public final class ErrorHandlingProtos { if (other.hasLineNumber()) { setLineNumber(other.getLineNumber()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -673,7 +669,7 @@ public final class ErrorHandlingProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); 
} finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -683,7 +679,6 @@ public final class ErrorHandlingProtos { } private int bitField0_; - // optional string declaring_class = 1; private java.lang.Object declaringClass_ = ""; /** * optional string declaring_class = 1; @@ -697,9 +692,12 @@ public final class ErrorHandlingProtos { public java.lang.String getDeclaringClass() { java.lang.Object ref = declaringClass_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - declaringClass_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + declaringClass_ = s; + } return s; } else { return (java.lang.String) ref; @@ -757,7 +755,6 @@ public final class ErrorHandlingProtos { return this; } - // optional string method_name = 2; private java.lang.Object methodName_ = ""; /** * optional string method_name = 2; @@ -771,9 +768,12 @@ public final class ErrorHandlingProtos { public java.lang.String getMethodName() { java.lang.Object ref = methodName_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - methodName_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + methodName_ = s; + } return s; } else { return (java.lang.String) ref; @@ -831,7 +831,6 @@ public final class ErrorHandlingProtos { return this; } - // optional string file_name = 3; private java.lang.Object fileName_ = ""; /** * optional string file_name = 3; @@ -845,9 +844,12 @@ public final class ErrorHandlingProtos { public java.lang.String getFileName() { java.lang.Object ref = fileName_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - fileName_ = s; + com.google.protobuf.ByteString 
bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + fileName_ = s; + } return s; } else { return (java.lang.String) ref; @@ -905,7 +907,6 @@ public final class ErrorHandlingProtos { return this; } - // optional int32 line_number = 4; private int lineNumber_ ; /** * optional int32 line_number = 4; @@ -937,22 +938,59 @@ public final class ErrorHandlingProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.StackTraceElementMessage) } + // @@protoc_insertion_point(class_scope:hbase.pb.StackTraceElementMessage) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage DEFAULT_INSTANCE; static { - defaultInstance = new StackTraceElementMessage(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public StackTraceElementMessage parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new StackTraceElementMessage(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public 
com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.StackTraceElementMessage) } - public interface GenericExceptionMessageOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface GenericExceptionMessageOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GenericExceptionMessage) + com.google.protobuf.MessageOrBuilder { - // optional string class_name = 1; /** * optional string class_name = 1; */ @@ -967,7 +1005,6 @@ public final class ErrorHandlingProtos { com.google.protobuf.ByteString getClassNameBytes(); - // optional string message = 2; /** * optional string message = 2; */ @@ -982,7 +1019,6 @@ public final class ErrorHandlingProtos { com.google.protobuf.ByteString getMessageBytes(); - // optional bytes error_info = 3; /** * optional bytes error_info = 3; */ @@ -992,7 +1028,6 @@ public final class ErrorHandlingProtos { */ com.google.protobuf.ByteString getErrorInfo(); - // repeated .hbase.pb.StackTraceElementMessage trace = 4; /** * repeated .hbase.pb.StackTraceElementMessage trace = 4; */ @@ -1018,8 +1053,6 @@ public final class ErrorHandlingProtos { int index); } /** - * Protobuf type {@code hbase.pb.GenericExceptionMessage} - * *
    **
    * Cause of a remote failure for a generic exception. Contains
@@ -1027,37 +1060,34 @@ public final class ErrorHandlingProtos {
    * optional info about the error for generic info passing
    * (which should be another protobuffed class).
    * 
+ * + * Protobuf type {@code hbase.pb.GenericExceptionMessage} */ - public static final class GenericExceptionMessage extends - com.google.protobuf.GeneratedMessage - implements GenericExceptionMessageOrBuilder { + public static final class GenericExceptionMessage extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GenericExceptionMessage) + GenericExceptionMessageOrBuilder { // Use GenericExceptionMessage.newBuilder() to construct. - private GenericExceptionMessage(com.google.protobuf.GeneratedMessage.Builder builder) { + private GenericExceptionMessage(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private GenericExceptionMessage(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final GenericExceptionMessage defaultInstance; - public static GenericExceptionMessage getDefaultInstance() { - return defaultInstance; - } - - public GenericExceptionMessage getDefaultInstanceForType() { - return defaultInstance; + private GenericExceptionMessage() { + className_ = ""; + message_ = ""; + errorInfo_ = com.google.protobuf.ByteString.EMPTY; + trace_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private GenericExceptionMessage( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -1077,13 +1107,15 @@ public final class ErrorHandlingProtos { break; } case 10: { + 
com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - className_ = input.readBytes(); + className_ = bs; break; } case 18: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; - message_ = input.readBytes(); + message_ = bs; break; } case 26: { @@ -1096,7 +1128,8 @@ public final class ErrorHandlingProtos { trace_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000008; } - trace_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.PARSER, extensionRegistry)); + trace_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.PARSER, extensionRegistry)); break; } } @@ -1105,7 +1138,7 @@ public final class ErrorHandlingProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { trace_ = java.util.Collections.unmodifiableList(trace_); @@ -1119,32 +1152,16 @@ public final class ErrorHandlingProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_GenericExceptionMessage_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_GenericExceptionMessage_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new 
com.google.protobuf.AbstractParser() { - public GenericExceptionMessage parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GenericExceptionMessage(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional string class_name = 1; public static final int CLASS_NAME_FIELD_NUMBER = 1; - private java.lang.Object className_; + private volatile java.lang.Object className_; /** * optional string class_name = 1; */ @@ -1185,9 +1202,8 @@ public final class ErrorHandlingProtos { } } - // optional string message = 2; public static final int MESSAGE_FIELD_NUMBER = 2; - private java.lang.Object message_; + private volatile java.lang.Object message_; /** * optional string message = 2; */ @@ -1228,7 +1244,6 @@ public final class ErrorHandlingProtos { } } - // optional bytes error_info = 3; public static final int ERROR_INFO_FIELD_NUMBER = 3; private com.google.protobuf.ByteString errorInfo_; /** @@ -1244,7 +1259,6 @@ public final class ErrorHandlingProtos { return errorInfo_; } - // repeated .hbase.pb.StackTraceElementMessage trace = 4; public static final int TRACE_FIELD_NUMBER = 4; private java.util.List trace_; /** @@ -1280,16 +1294,11 @@ public final class ErrorHandlingProtos { return trace_.get(index); } - private void initFields() { - className_ = ""; - message_ = ""; - errorInfo_ = com.google.protobuf.ByteString.EMPTY; - trace_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -1297,12 +1306,11 @@ public final class 
ErrorHandlingProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getClassNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, className_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getMessageBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, message_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBytes(3, errorInfo_); @@ -1310,22 +1318,19 @@ public final class ErrorHandlingProtos { for (int i = 0; i < trace_.size(); i++) { output.writeMessage(4, trace_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getClassNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, className_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getMessageBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, message_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream @@ -1335,19 +1340,13 @@ public final class ErrorHandlingProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(4, trace_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return 
super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -1375,12 +1374,10 @@ public final class ErrorHandlingProtos { } result = result && getTraceList() .equals(other.getTraceList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -1404,7 +1401,7 @@ public final class ErrorHandlingProtos { hash = (37 * hash) + TRACE_FIELD_NUMBER; hash = (53 * hash) + getTraceList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -1432,52 +1429,61 @@ public final class ErrorHandlingProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.GenericExceptionMessage} - * *
      **
      * Cause of a remote failure for a generic exception. Contains
@@ -1485,16 +1491,19 @@ public final class ErrorHandlingProtos {
      * optional info about the error for generic info passing
      * (which should be another protobuffed class).
      * 
+ * + * Protobuf type {@code hbase.pb.GenericExceptionMessage} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessageOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GenericExceptionMessage) + org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessageOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_GenericExceptionMessage_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_GenericExceptionMessage_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -1507,19 +1516,16 @@ public final class ErrorHandlingProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTraceFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); className_ = ""; @@ -1537,10 +1543,6 @@ public final class ErrorHandlingProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_GenericExceptionMessage_descriptor; @@ -1588,6 +1590,32 @@ public final class ErrorHandlingProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage)other); @@ -1631,14 +1659,15 @@ public final class ErrorHandlingProtos { trace_ = other.trace_; bitField0_ = (bitField0_ & ~0x00000008); traceBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getTraceFieldBuilder() : null; } else { traceBuilder_.addAllMessages(other.trace_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -1655,7 +1684,7 @@ public final class ErrorHandlingProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -1665,7 +1694,6 @@ public final class ErrorHandlingProtos { } private int bitField0_; - // optional string class_name = 1; private java.lang.Object className_ = ""; /** * optional string class_name = 1; @@ -1679,9 +1707,12 @@ public final class ErrorHandlingProtos { public java.lang.String getClassName() { java.lang.Object ref = className_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - className_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + className_ = s; + } return s; } else { return (java.lang.String) ref; @@ -1739,7 +1770,6 @@ public final class ErrorHandlingProtos { return this; } - // optional string message = 2; private java.lang.Object message_ = ""; /** * optional string message = 2; @@ -1753,9 +1783,12 @@ public final class ErrorHandlingProtos { public java.lang.String getMessage() { java.lang.Object ref = message_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - message_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + message_ = s; + } 
return s; } else { return (java.lang.String) ref; @@ -1813,7 +1846,6 @@ public final class ErrorHandlingProtos { return this; } - // optional bytes error_info = 3; private com.google.protobuf.ByteString errorInfo_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes error_info = 3; @@ -1849,7 +1881,6 @@ public final class ErrorHandlingProtos { return this; } - // repeated .hbase.pb.StackTraceElementMessage trace = 4; private java.util.List trace_ = java.util.Collections.emptyList(); private void ensureTraceIsMutable() { @@ -1859,7 +1890,7 @@ public final class ErrorHandlingProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessageOrBuilder> traceBuilder_; /** @@ -1991,7 +2022,8 @@ public final class ErrorHandlingProtos { java.lang.Iterable values) { if (traceBuilder_ == null) { ensureTraceIsMutable(); - super.addAll(values, trace_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, trace_); onChanged(); } else { traceBuilder_.addAllMessages(values); @@ -2074,11 +2106,11 @@ public final class ErrorHandlingProtos { getTraceBuilderList() { return getTraceFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessageOrBuilder> getTraceFieldBuilder() { if (traceBuilder_ == null) { - traceBuilder_ = new 
com.google.protobuf.RepeatedFieldBuilder< + traceBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessageOrBuilder>( trace_, ((bitField0_ & 0x00000008) == 0x00000008), @@ -2088,22 +2120,59 @@ public final class ErrorHandlingProtos { } return traceBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.GenericExceptionMessage) } + // @@protoc_insertion_point(class_scope:hbase.pb.GenericExceptionMessage) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage DEFAULT_INSTANCE; static { - defaultInstance = new GenericExceptionMessage(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GenericExceptionMessage parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GenericExceptionMessage(input, extensionRegistry); + } + }; + + public static 
com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.GenericExceptionMessage) } - public interface ForeignExceptionMessageOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ForeignExceptionMessageOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ForeignExceptionMessage) + com.google.protobuf.MessageOrBuilder { - // optional string source = 1; /** * optional string source = 1; */ @@ -2118,7 +2187,6 @@ public final class ErrorHandlingProtos { com.google.protobuf.ByteString getSourceBytes(); - // optional .hbase.pb.GenericExceptionMessage generic_exception = 2; /** * optional .hbase.pb.GenericExceptionMessage generic_exception = 2; */ @@ -2133,44 +2201,36 @@ public final class ErrorHandlingProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessageOrBuilder getGenericExceptionOrBuilder(); } /** - * Protobuf type {@code hbase.pb.ForeignExceptionMessage} - * *
    **
    * Exception sent across the wire when a remote task needs
    * to notify other tasks that it failed and why
    * 
+ * + * Protobuf type {@code hbase.pb.ForeignExceptionMessage} */ - public static final class ForeignExceptionMessage extends - com.google.protobuf.GeneratedMessage - implements ForeignExceptionMessageOrBuilder { + public static final class ForeignExceptionMessage extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ForeignExceptionMessage) + ForeignExceptionMessageOrBuilder { // Use ForeignExceptionMessage.newBuilder() to construct. - private ForeignExceptionMessage(com.google.protobuf.GeneratedMessage.Builder builder) { + private ForeignExceptionMessage(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private ForeignExceptionMessage(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ForeignExceptionMessage defaultInstance; - public static ForeignExceptionMessage getDefaultInstance() { - return defaultInstance; - } - - public ForeignExceptionMessage getDefaultInstanceForType() { - return defaultInstance; + private ForeignExceptionMessage() { + source_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ForeignExceptionMessage( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -2190,8 +2250,9 @@ public final class ErrorHandlingProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - source_ = input.readBytes(); + source_ = bs; break; } case 18: { 
@@ -2213,7 +2274,7 @@ public final class ErrorHandlingProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -2224,32 +2285,16 @@ public final class ErrorHandlingProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_ForeignExceptionMessage_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_ForeignExceptionMessage_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ForeignExceptionMessage parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ForeignExceptionMessage(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional string source = 1; public static final int SOURCE_FIELD_NUMBER = 1; - private java.lang.Object source_; + private volatile java.lang.Object source_; /** * optional string source = 1; */ @@ -2290,7 +2335,6 @@ public final class ErrorHandlingProtos { } } - // optional .hbase.pb.GenericExceptionMessage generic_exception = 2; public static 
final int GENERIC_EXCEPTION_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage genericException_; /** @@ -2303,23 +2347,20 @@ public final class ErrorHandlingProtos { * optional .hbase.pb.GenericExceptionMessage generic_exception = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage getGenericException() { - return genericException_; + return genericException_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.getDefaultInstance() : genericException_; } /** * optional .hbase.pb.GenericExceptionMessage generic_exception = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessageOrBuilder getGenericExceptionOrBuilder() { - return genericException_; + return genericException_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.getDefaultInstance() : genericException_; } - private void initFields() { - source_ = ""; - genericException_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -2327,43 +2368,34 @@ public final class ErrorHandlingProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getSourceBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, source_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, genericException_); + 
output.writeMessage(2, getGenericException()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getSourceBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, source_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, genericException_); + .computeMessageSize(2, getGenericException()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -2384,12 +2416,10 @@ public final class ErrorHandlingProtos { result = result && getGenericException() .equals(other.getGenericException()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -2405,7 +2435,7 @@ public final class ErrorHandlingProtos { hash = (37 * hash) + GENERIC_EXCEPTION_FIELD_NUMBER; hash = (53 * hash) + getGenericException().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -2433,67 +2463,79 @@ public final class ErrorHandlingProtos { } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException 
{ - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.ForeignExceptionMessage} - * *
      **
      * Exception sent across the wire when a remote task needs
      * to notify other tasks that it failed and why
      * 
+ * + * Protobuf type {@code hbase.pb.ForeignExceptionMessage} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ForeignExceptionMessage) + org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_ForeignExceptionMessage_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_ForeignExceptionMessage_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -2506,25 +2548,22 @@ public final class ErrorHandlingProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getGenericExceptionFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); source_ = ""; bitField0_ = (bitField0_ & ~0x00000001); if (genericExceptionBuilder_ == null) { - genericException_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.getDefaultInstance(); + genericException_ = null; } else { genericExceptionBuilder_.clear(); 
} @@ -2532,10 +2571,6 @@ public final class ErrorHandlingProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_ForeignExceptionMessage_descriptor; @@ -2574,6 +2609,32 @@ public final class ErrorHandlingProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage)other); @@ -2593,7 +2654,8 @@ public final class ErrorHandlingProtos { if (other.hasGenericException()) { mergeGenericException(other.getGenericException()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -2610,7 +2672,7 @@ public final class ErrorHandlingProtos { parsedMessage = PARSER.parsePartialFrom(input, 
extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -2620,7 +2682,6 @@ public final class ErrorHandlingProtos { } private int bitField0_; - // optional string source = 1; private java.lang.Object source_ = ""; /** * optional string source = 1; @@ -2634,9 +2695,12 @@ public final class ErrorHandlingProtos { public java.lang.String getSource() { java.lang.Object ref = source_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - source_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + source_ = s; + } return s; } else { return (java.lang.String) ref; @@ -2694,9 +2758,8 @@ public final class ErrorHandlingProtos { return this; } - // optional .hbase.pb.GenericExceptionMessage generic_exception = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage genericException_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage genericException_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessageOrBuilder> genericExceptionBuilder_; /** * optional 
.hbase.pb.GenericExceptionMessage generic_exception = 2; @@ -2709,7 +2772,7 @@ public final class ErrorHandlingProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage getGenericException() { if (genericExceptionBuilder_ == null) { - return genericException_; + return genericException_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.getDefaultInstance() : genericException_; } else { return genericExceptionBuilder_.getMessage(); } @@ -2750,6 +2813,7 @@ public final class ErrorHandlingProtos { public Builder mergeGenericException(org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage value) { if (genericExceptionBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + genericException_ != null && genericException_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.getDefaultInstance()) { genericException_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.newBuilder(genericException_).mergeFrom(value).buildPartial(); @@ -2768,7 +2832,7 @@ public final class ErrorHandlingProtos { */ public Builder clearGenericException() { if (genericExceptionBuilder_ == null) { - genericException_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.getDefaultInstance(); + genericException_ = null; onChanged(); } else { genericExceptionBuilder_.clear(); @@ -2791,58 +2855,96 @@ public final class ErrorHandlingProtos { if (genericExceptionBuilder_ != null) { return genericExceptionBuilder_.getMessageOrBuilder(); } else { - return genericException_; + return genericException_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.getDefaultInstance() : genericException_; } } /** * optional .hbase.pb.GenericExceptionMessage generic_exception = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessageOrBuilder> getGenericExceptionFieldBuilder() { if (genericExceptionBuilder_ == null) { - genericExceptionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + genericExceptionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessageOrBuilder>( - genericException_, + getGenericException(), getParentForChildren(), isClean()); genericException_ = null; } return genericExceptionBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ForeignExceptionMessage) } + // @@protoc_insertion_point(class_scope:hbase.pb.ForeignExceptionMessage) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage DEFAULT_INSTANCE; static { - defaultInstance = new ForeignExceptionMessage(true); - defaultInstance.initFields(); + 
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ForeignExceptionMessage parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ForeignExceptionMessage(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ForeignExceptionMessage) } - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_StackTraceElementMessage_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_StackTraceElementMessage_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_GenericExceptionMessage_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_GenericExceptionMessage_fieldAccessorTable; - private static 
com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ForeignExceptionMessage_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ForeignExceptionMessage_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { @@ -2860,35 +2962,35 @@ public final class ErrorHandlingProtos { "\023ErrorHandlingProtosH\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_hbase_pb_StackTraceElementMessage_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_hbase_pb_StackTraceElementMessage_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_StackTraceElementMessage_descriptor, - new java.lang.String[] { "DeclaringClass", "MethodName", "FileName", "LineNumber", }); - internal_static_hbase_pb_GenericExceptionMessage_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_hbase_pb_GenericExceptionMessage_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_GenericExceptionMessage_descriptor, - new java.lang.String[] { "ClassName", "Message", "ErrorInfo", "Trace", }); - internal_static_hbase_pb_ForeignExceptionMessage_descriptor = - getDescriptor().getMessageTypes().get(2); - 
internal_static_hbase_pb_ForeignExceptionMessage_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ForeignExceptionMessage_descriptor, - new java.lang.String[] { "Source", "GenericException", }); - return null; - } - }; + new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); + internal_static_hbase_pb_StackTraceElementMessage_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_hbase_pb_StackTraceElementMessage_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_StackTraceElementMessage_descriptor, + new java.lang.String[] { "DeclaringClass", "MethodName", "FileName", "LineNumber", }); + internal_static_hbase_pb_GenericExceptionMessage_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_hbase_pb_GenericExceptionMessage_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GenericExceptionMessage_descriptor, + new java.lang.String[] { "ClassName", "Message", "ErrorInfo", "Trace", }); + internal_static_hbase_pb_ForeignExceptionMessage_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_hbase_pb_ForeignExceptionMessage_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ForeignExceptionMessage_descriptor, + new java.lang.String[] { "Source", "GenericException", }); } // @@protoc_insertion_point(outer_class_scope) diff --git 
a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/FSProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/FSProtos.java index 74d3f86..5b456c8 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/FSProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/FSProtos.java @@ -6,12 +6,18 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated; public final class FSProtos { private FSProtos() {} public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); } - public interface HBaseVersionFileContentOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface HBaseVersionFileContentOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.HBaseVersionFileContent) + com.google.protobuf.MessageOrBuilder { - // required string version = 1; /** * required string version = 1; */ @@ -27,43 +33,35 @@ public final class FSProtos { getVersionBytes(); } /** - * Protobuf type {@code hbase.pb.HBaseVersionFileContent} - * *
    **
    * The ${HBASE_ROOTDIR}/hbase.version file content
    * 
+ * + * Protobuf type {@code hbase.pb.HBaseVersionFileContent} */ - public static final class HBaseVersionFileContent extends - com.google.protobuf.GeneratedMessage - implements HBaseVersionFileContentOrBuilder { + public static final class HBaseVersionFileContent extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.HBaseVersionFileContent) + HBaseVersionFileContentOrBuilder { // Use HBaseVersionFileContent.newBuilder() to construct. - private HBaseVersionFileContent(com.google.protobuf.GeneratedMessage.Builder builder) { + private HBaseVersionFileContent(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private HBaseVersionFileContent(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final HBaseVersionFileContent defaultInstance; - public static HBaseVersionFileContent getDefaultInstance() { - return defaultInstance; - } - - public HBaseVersionFileContent getDefaultInstanceForType() { - return defaultInstance; + private HBaseVersionFileContent() { + version_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private HBaseVersionFileContent( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -83,8 +81,9 @@ public final class FSProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - version_ = input.readBytes(); + version_ = bs; break; } } @@ -93,7 +92,7 @@ 
public final class FSProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -104,32 +103,16 @@ public final class FSProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.internal_static_hbase_pb_HBaseVersionFileContent_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.internal_static_hbase_pb_HBaseVersionFileContent_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public HBaseVersionFileContent parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new HBaseVersionFileContent(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string version = 1; public static final int VERSION_FIELD_NUMBER = 1; - private java.lang.Object version_; + private volatile java.lang.Object version_; /** * required string version = 1; */ @@ -170,13 +153,11 @@ public final class FSProtos { } } - private void initFields() { - version_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if 
(isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasVersion()) { memoizedIsInitialized = 0; @@ -188,36 +169,27 @@ public final class FSProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getVersionBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, version_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getVersionBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, version_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -233,12 +205,10 @@ public final class FSProtos { result = result && getVersion() .equals(other.getVersion()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -250,7 +220,7 @@ public final class FSProtos { hash = (37 * hash) + VERSION_FIELD_NUMBER; hash = (53 * hash) + getVersion().hashCode(); } - hash = (29 * hash) + 
getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -278,66 +248,78 @@ public final class FSProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.HBaseVersionFileContent} - * *
      **
      * The ${HBASE_ROOTDIR}/hbase.version file content
      * 
+ * + * Protobuf type {@code hbase.pb.HBaseVersionFileContent} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.HBaseVersionFileContent) + org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.internal_static_hbase_pb_HBaseVersionFileContent_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.internal_static_hbase_pb_HBaseVersionFileContent_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -350,18 +332,15 @@ public final class FSProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); version_ = ""; @@ -369,10 +348,6 @@ public final class FSProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.internal_static_hbase_pb_HBaseVersionFileContent_descriptor; @@ -403,6 +378,32 @@ public final 
class FSProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent)other); @@ -419,13 +420,13 @@ public final class FSProtos { version_ = other.version_; onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasVersion()) { - return false; } return true; @@ -440,7 +441,7 @@ public final class FSProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -450,7 +451,6 @@ public final class FSProtos { } private int bitField0_; - // required string 
version = 1; private java.lang.Object version_ = ""; /** * required string version = 1; @@ -464,9 +464,12 @@ public final class FSProtos { public java.lang.String getVersion() { java.lang.Object ref = version_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - version_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + version_ = s; + } return s; } else { return (java.lang.String) ref; @@ -523,22 +526,59 @@ public final class FSProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.HBaseVersionFileContent) } + // @@protoc_insertion_point(class_scope:hbase.pb.HBaseVersionFileContent) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent DEFAULT_INSTANCE; static { - defaultInstance = new HBaseVersionFileContent(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public HBaseVersionFileContent parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new HBaseVersionFileContent(input, 
extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.HBaseVersionFileContent) } - public interface ReferenceOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ReferenceOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.Reference) + com.google.protobuf.MessageOrBuilder { - // required bytes splitkey = 1; /** * required bytes splitkey = 1; */ @@ -548,7 +588,6 @@ public final class FSProtos { */ com.google.protobuf.ByteString getSplitkey(); - // required .hbase.pb.Reference.Range range = 2; /** * required .hbase.pb.Reference.Range range = 2; */ @@ -559,43 +598,36 @@ public final class FSProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.Range getRange(); } /** - * Protobuf type {@code hbase.pb.Reference} - * *
    **
    * Reference file content used when we split an hfile under a region.
    * 
+ * + * Protobuf type {@code hbase.pb.Reference} */ - public static final class Reference extends - com.google.protobuf.GeneratedMessage - implements ReferenceOrBuilder { + public static final class Reference extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.Reference) + ReferenceOrBuilder { // Use Reference.newBuilder() to construct. - private Reference(com.google.protobuf.GeneratedMessage.Builder builder) { + private Reference(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private Reference(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final Reference defaultInstance; - public static Reference getDefaultInstance() { - return defaultInstance; } - - public Reference getDefaultInstanceForType() { - return defaultInstance; + private Reference() { + splitkey_ = com.google.protobuf.ByteString.EMPTY; + range_ = 0; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private Reference( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -626,7 +658,7 @@ public final class FSProtos { unknownFields.mergeVarintField(2, rawValue); } else { bitField0_ |= 0x00000002; - range_ = value; + range_ = rawValue; } break; } @@ -636,7 +668,7 @@ public final class FSProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - 
e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -647,28 +679,13 @@ public final class FSProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.internal_static_hbase_pb_Reference_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.internal_static_hbase_pb_Reference_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public Reference parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new Reference(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - /** * Protobuf enum {@code hbase.pb.Reference.Range} */ @@ -677,11 +694,11 @@ public final class FSProtos { /** * TOP = 0; */ - TOP(0, 0), + TOP(0), /** * BOTTOM = 1; */ - BOTTOM(1, 1), + BOTTOM(1), ; /** @@ -694,9 +711,19 @@ public final class FSProtos { public static final int BOTTOM_VALUE = 1; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. 
+ */ + @java.lang.Deprecated public static Range valueOf(int value) { + return forNumber(value); + } + + public static Range forNumber(int value) { switch (value) { case 0: return TOP; case 1: return BOTTOM; @@ -708,17 +735,17 @@ public final class FSProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + Range> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public Range findValueByNumber(int number) { - return Range.valueOf(number); + return Range.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -740,11 +767,9 @@ public final class FSProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private Range(int index, int value) { - this.index = index; + private Range(int value) { this.value = value; } @@ -752,7 +777,6 @@ public final class FSProtos { } private int bitField0_; - // required bytes splitkey = 1; public static final int SPLITKEY_FIELD_NUMBER = 1; private com.google.protobuf.ByteString splitkey_; /** @@ -768,9 +792,8 @@ public final class FSProtos { return splitkey_; } - // required .hbase.pb.Reference.Range range = 2; public static final int RANGE_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.Range range_; + private int range_; /** * required .hbase.pb.Reference.Range range = 2; */ @@ -781,17 +804,15 @@ public final class FSProtos { * required .hbase.pb.Reference.Range range = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.Range getRange() { - return range_; + 
org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.Range result = org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.Range.valueOf(range_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.Range.TOP : result; } - private void initFields() { - splitkey_ = com.google.protobuf.ByteString.EMPTY; - range_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.Range.TOP; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasSplitkey()) { memoizedIsInitialized = 0; @@ -807,19 +828,17 @@ public final class FSProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, splitkey_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeEnum(2, range_.getNumber()); + output.writeEnum(2, range_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -829,21 +848,15 @@ public final class FSProtos { } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(2, range_.getNumber()); + .computeEnumSize(2, range_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - 
@java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -861,15 +874,12 @@ public final class FSProtos { } result = result && (hasRange() == other.hasRange()); if (hasRange()) { - result = result && - (getRange() == other.getRange()); + result = result && range_ == other.range_; } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -883,9 +893,9 @@ public final class FSProtos { } if (hasRange()) { hash = (37 * hash) + RANGE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getRange()); + hash = (53 * hash) + range_; } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -913,66 +923,78 @@ public final class FSProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.Reference} - * *
      **
      * Reference file content used when we split an hfile under a region.
      * 
+ * + * Protobuf type {@code hbase.pb.Reference} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.ReferenceOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.Reference) + org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.ReferenceOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.internal_static_hbase_pb_Reference_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.internal_static_hbase_pb_Reference_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -985,31 +1007,24 @@ public final class FSProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); splitkey_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); - range_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.Range.TOP; + range_ = 0; bitField0_ = (bitField0_ & ~0x00000002); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.internal_static_hbase_pb_Reference_descriptor; @@ -1044,6 +1059,32 @@ public final class FSProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference)other); @@ -1061,17 +1102,16 @@ public final class FSProtos { if (other.hasRange()) { setRange(other.getRange()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasSplitkey()) { - return false; } if (!hasRange()) { - return false; } return true; @@ -1086,7 +1126,7 @@ public final class FSProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if 
(parsedMessage != null) { mergeFrom(parsedMessage); @@ -1096,7 +1136,6 @@ public final class FSProtos { } private int bitField0_; - // required bytes splitkey = 1; private com.google.protobuf.ByteString splitkey_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes splitkey = 1; @@ -1132,8 +1171,7 @@ public final class FSProtos { return this; } - // required .hbase.pb.Reference.Range range = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.Range range_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.Range.TOP; + private int range_ = 0; /** * required .hbase.pb.Reference.Range range = 2; */ @@ -1144,7 +1182,8 @@ public final class FSProtos { * required .hbase.pb.Reference.Range range = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.Range getRange() { - return range_; + org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.Range result = org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.Range.valueOf(range_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.Range.TOP : result; } /** * required .hbase.pb.Reference.Range range = 2; @@ -1154,7 +1193,7 @@ public final class FSProtos { throw new NullPointerException(); } bitField0_ |= 0x00000002; - range_ = value; + range_ = value.getNumber(); onChanged(); return this; } @@ -1163,38 +1202,75 @@ public final class FSProtos { */ public Builder clearRange() { bitField0_ = (bitField0_ & ~0x00000002); - range_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.Range.TOP; + range_ = 0; onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.Reference) } + // @@protoc_insertion_point(class_scope:hbase.pb.Reference) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference DEFAULT_INSTANCE; static { - defaultInstance = new Reference(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public Reference parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Reference(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public 
com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.Reference) } - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_HBaseVersionFileContent_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_HBaseVersionFileContent_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_Reference_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_Reference_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { @@ -1206,29 +1282,29 @@ public final class FSProtos { "d.protobuf.generatedB\010FSProtosH\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_hbase_pb_HBaseVersionFileContent_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_hbase_pb_HBaseVersionFileContent_fieldAccessorTable = new - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_HBaseVersionFileContent_descriptor, - new java.lang.String[] { "Version", }); - internal_static_hbase_pb_Reference_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_hbase_pb_Reference_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_Reference_descriptor, - new java.lang.String[] { "Splitkey", "Range", }); - return null; - } - }; + new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); + internal_static_hbase_pb_HBaseVersionFileContent_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_hbase_pb_HBaseVersionFileContent_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_HBaseVersionFileContent_descriptor, + new java.lang.String[] { "Version", }); + internal_static_hbase_pb_Reference_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_hbase_pb_Reference_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_Reference_descriptor, + new java.lang.String[] { "Splitkey", "Range", }); } // @@protoc_insertion_point(outer_class_scope) diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/FilterProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/FilterProtos.java index b63acb2..7204813 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/FilterProtos.java 
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/FilterProtos.java @@ -6,12 +6,18 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated; public final class FilterProtos { private FilterProtos() {} public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); } - public interface FilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface FilterOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.Filter) + com.google.protobuf.MessageOrBuilder { - // required string name = 1; /** * required string name = 1; */ @@ -26,7 +32,6 @@ public final class FilterProtos { com.google.protobuf.ByteString getNameBytes(); - // optional bytes serialized_filter = 2; /** * optional bytes serialized_filter = 2; */ @@ -39,36 +44,29 @@ public final class FilterProtos { /** * Protobuf type {@code hbase.pb.Filter} */ - public static final class Filter extends - com.google.protobuf.GeneratedMessage - implements FilterOrBuilder { + public static final class Filter extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.Filter) + FilterOrBuilder { // Use Filter.newBuilder() to construct. 
- private Filter(com.google.protobuf.GeneratedMessage.Builder builder) { + private Filter(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private Filter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final Filter defaultInstance; - public static Filter getDefaultInstance() { - return defaultInstance; } - - public Filter getDefaultInstanceForType() { - return defaultInstance; + private Filter() { + name_ = ""; + serializedFilter_ = com.google.protobuf.ByteString.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private Filter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -88,8 +86,9 @@ public final class FilterProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - name_ = input.readBytes(); + name_ = bs; break; } case 18: { @@ -103,7 +102,7 @@ public final class FilterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -114,32 +113,16 @@ public final class FilterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_Filter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + 
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_Filter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public Filter parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new Filter(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string name = 1; public static final int NAME_FIELD_NUMBER = 1; - private java.lang.Object name_; + private volatile java.lang.Object name_; /** * required string name = 1; */ @@ -180,7 +163,6 @@ public final class FilterProtos { } } - // optional bytes serialized_filter = 2; public static final int SERIALIZED_FILTER_FIELD_NUMBER = 2; private com.google.protobuf.ByteString serializedFilter_; /** @@ -196,14 +178,11 @@ public final class FilterProtos { return serializedFilter_; } - private void initFields() { - name_ = ""; - serializedFilter_ = com.google.protobuf.ByteString.EMPTY; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasName()) { memoizedIsInitialized = 0; @@ -215,43 +194,34 @@ public final class FilterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if 
(((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, serializedFilter_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, serializedFilter_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -272,12 +242,10 @@ public final class FilterProtos { result = result && getSerializedFilter() .equals(other.getSerializedFilter()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -293,7 +261,7 @@ public final class FilterProtos { hash = (37 * hash) + SERIALIZED_FILTER_FIELD_NUMBER; hash = (53 * hash) + getSerializedFilter().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); 
memoizedHashCode = hash; return hash; } @@ -321,46 +289,57 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return 
PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -368,14 +347,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.Filter} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.Filter) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_Filter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_Filter_fieldAccessorTable 
.ensureFieldAccessorsInitialized( @@ -388,18 +368,15 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); name_ = ""; @@ -409,10 +386,6 @@ public final class FilterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_Filter_descriptor; @@ -447,6 +420,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter)other); @@ -466,13 +465,13 @@ public final class FilterProtos { if (other.hasSerializedFilter()) { setSerializedFilter(other.getSerializedFilter()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasName()) { - return false; } return true; @@ -487,7 +486,7 @@ public final class FilterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -497,7 +496,6 @@ public final class FilterProtos { } private int bitField0_; - // required string name = 1; private java.lang.Object name_ = ""; /** * required string name = 1; @@ -511,9 +509,12 @@ public final class FilterProtos { public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - name_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + name_ = s; + } return s; } else { return (java.lang.String) ref; @@ -571,7 +572,6 @@ public final class FilterProtos { return this; } - // optional bytes serialized_filter = 2; private com.google.protobuf.ByteString serializedFilter_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes serialized_filter = 2; @@ -606,22 +606,59 @@ public final class FilterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final 
com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.Filter) } + // @@protoc_insertion_point(class_scope:hbase.pb.Filter) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter DEFAULT_INSTANCE; static { - defaultInstance = new Filter(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public Filter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Filter(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.Filter) } - public interface ColumnCountGetFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ColumnCountGetFilterOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ColumnCountGetFilter) + com.google.protobuf.MessageOrBuilder { - // required int32 limit = 1; /** * required int32 limit = 1; */ @@ -634,36 +671,28 @@ public final class FilterProtos { /** * 
Protobuf type {@code hbase.pb.ColumnCountGetFilter} */ - public static final class ColumnCountGetFilter extends - com.google.protobuf.GeneratedMessage - implements ColumnCountGetFilterOrBuilder { + public static final class ColumnCountGetFilter extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ColumnCountGetFilter) + ColumnCountGetFilterOrBuilder { // Use ColumnCountGetFilter.newBuilder() to construct. - private ColumnCountGetFilter(com.google.protobuf.GeneratedMessage.Builder builder) { + private ColumnCountGetFilter(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ColumnCountGetFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ColumnCountGetFilter defaultInstance; - public static ColumnCountGetFilter getDefaultInstance() { - return defaultInstance; } - - public ColumnCountGetFilter getDefaultInstanceForType() { - return defaultInstance; + private ColumnCountGetFilter() { + limit_ = 0; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ColumnCountGetFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -693,7 +722,7 @@ public final class FilterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { 
this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -704,30 +733,14 @@ public final class FilterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnCountGetFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnCountGetFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnCountGetFilter.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnCountGetFilter.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ColumnCountGetFilter parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ColumnCountGetFilter(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required int32 limit = 1; public static final int LIMIT_FIELD_NUMBER = 1; private int limit_; /** @@ -743,13 +756,11 @@ public final class FilterProtos { return limit_; } - private void initFields() { - limit_ = 0; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasLimit()) { memoizedIsInitialized = 0; @@ -761,16 +772,14 @@ public final class FilterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - 
getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeInt32(1, limit_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -778,19 +787,13 @@ public final class FilterProtos { size += com.google.protobuf.CodedOutputStream .computeInt32Size(1, limit_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -806,12 +809,10 @@ public final class FilterProtos { result = result && (getLimit() == other.getLimit()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -823,7 +824,7 @@ public final class FilterProtos { hash = (37 * hash) + LIMIT_FIELD_NUMBER; hash = (53 * hash) + getLimit(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -851,46 +852,57 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnCountGetFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnCountGetFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return 
DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnCountGetFilter prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -898,14 +910,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.ColumnCountGetFilter} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnCountGetFilterOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ColumnCountGetFilter) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnCountGetFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnCountGetFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnCountGetFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -918,18 +931,15 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { 
super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); limit_ = 0; @@ -937,10 +947,6 @@ public final class FilterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnCountGetFilter_descriptor; @@ -971,6 +977,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnCountGetFilter) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnCountGetFilter)other); @@ -985,13 +1017,13 @@ public final class FilterProtos { if (other.hasLimit()) 
{ setLimit(other.getLimit()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasLimit()) { - return false; } return true; @@ -1006,7 +1038,7 @@ public final class FilterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnCountGetFilter) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -1016,7 +1048,6 @@ public final class FilterProtos { } private int bitField0_; - // required int32 limit = 1; private int limit_ ; /** * required int32 limit = 1; @@ -1048,22 +1079,59 @@ public final class FilterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ColumnCountGetFilter) } + // @@protoc_insertion_point(class_scope:hbase.pb.ColumnCountGetFilter) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnCountGetFilter DEFAULT_INSTANCE; static { - defaultInstance = new ColumnCountGetFilter(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnCountGetFilter(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnCountGetFilter getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new 
com.google.protobuf.AbstractParser() { + public ColumnCountGetFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ColumnCountGetFilter(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnCountGetFilter getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ColumnCountGetFilter) } - public interface ColumnPaginationFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ColumnPaginationFilterOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ColumnPaginationFilter) + com.google.protobuf.MessageOrBuilder { - // required int32 limit = 1; /** * required int32 limit = 1; */ @@ -1073,7 +1141,6 @@ public final class FilterProtos { */ int getLimit(); - // optional int32 offset = 2; /** * optional int32 offset = 2; */ @@ -1083,7 +1150,6 @@ public final class FilterProtos { */ int getOffset(); - // optional bytes column_offset = 3; /** * optional bytes column_offset = 3; */ @@ -1096,36 +1162,30 @@ public final class FilterProtos { /** * Protobuf type {@code hbase.pb.ColumnPaginationFilter} */ - public static final class ColumnPaginationFilter extends - com.google.protobuf.GeneratedMessage - implements ColumnPaginationFilterOrBuilder { + public static final class ColumnPaginationFilter extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ColumnPaginationFilter) + ColumnPaginationFilterOrBuilder { // Use ColumnPaginationFilter.newBuilder() to construct. 
- private ColumnPaginationFilter(com.google.protobuf.GeneratedMessage.Builder builder) { + private ColumnPaginationFilter(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private ColumnPaginationFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ColumnPaginationFilter defaultInstance; - public static ColumnPaginationFilter getDefaultInstance() { - return defaultInstance; - } - - public ColumnPaginationFilter getDefaultInstanceForType() { - return defaultInstance; + private ColumnPaginationFilter() { + limit_ = 0; + offset_ = 0; + columnOffset_ = com.google.protobuf.ByteString.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ColumnPaginationFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -1165,7 +1225,7 @@ public final class FilterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -1176,30 +1236,14 @@ public final class FilterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPaginationFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPaginationFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPaginationFilter.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPaginationFilter.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ColumnPaginationFilter parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ColumnPaginationFilter(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required int32 limit = 1; public static final int LIMIT_FIELD_NUMBER = 1; private int limit_; /** @@ -1215,7 +1259,6 @@ public final class FilterProtos { return limit_; } - // optional int32 offset = 2; public static final int OFFSET_FIELD_NUMBER = 2; private int offset_; /** @@ -1231,7 +1274,6 @@ public final class FilterProtos { return offset_; } - // optional bytes column_offset = 3; public static final int COLUMN_OFFSET_FIELD_NUMBER = 3; private com.google.protobuf.ByteString columnOffset_; /** @@ -1247,15 +1289,11 @@ public final class FilterProtos { return columnOffset_; } - private void initFields() { - limit_ = 0; - offset_ = 0; - columnOffset_ = com.google.protobuf.ByteString.EMPTY; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasLimit()) { memoizedIsInitialized = 0; @@ 
-1267,7 +1305,6 @@ public final class FilterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeInt32(1, limit_); } @@ -1277,12 +1314,11 @@ public final class FilterProtos { if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBytes(3, columnOffset_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -1298,19 +1334,13 @@ public final class FilterProtos { size += com.google.protobuf.CodedOutputStream .computeBytesSize(3, columnOffset_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -1336,12 +1366,10 @@ public final class FilterProtos { result = result && getColumnOffset() .equals(other.getColumnOffset()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -1361,7 +1389,7 @@ public final class FilterProtos { hash = (37 * hash) + COLUMN_OFFSET_FIELD_NUMBER; hash = (53 * hash) + getColumnOffset().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -1389,46 +1417,57 @@ public final class 
FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPaginationFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPaginationFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return 
PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPaginationFilter prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -1436,14 +1475,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.ColumnPaginationFilter} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPaginationFilterOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ColumnPaginationFilter) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPaginationFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPaginationFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPaginationFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -1456,18 +1496,15 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); limit_ = 0; @@ -1479,10 +1516,6 @@ public final class FilterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPaginationFilter_descriptor; @@ -1521,6 +1554,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) 
super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPaginationFilter) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPaginationFilter)other); @@ -1541,13 +1600,13 @@ public final class FilterProtos { if (other.hasColumnOffset()) { setColumnOffset(other.getColumnOffset()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasLimit()) { - return false; } return true; @@ -1562,7 +1621,7 @@ public final class FilterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPaginationFilter) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -1572,7 +1631,6 @@ public final class FilterProtos { } private int bitField0_; - // required int32 limit = 1; private int limit_ ; /** * required int32 limit = 1; @@ -1605,7 +1663,6 @@ public final class FilterProtos { return this; } - // optional int32 offset = 2; private int offset_ ; /** * optional int32 offset = 2; @@ -1638,7 +1695,6 @@ public final class FilterProtos { return this; } - // optional bytes column_offset = 3; private com.google.protobuf.ByteString columnOffset_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes column_offset = 3; @@ -1673,22 +1729,59 @@ public final class FilterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final 
com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ColumnPaginationFilter) } + // @@protoc_insertion_point(class_scope:hbase.pb.ColumnPaginationFilter) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPaginationFilter DEFAULT_INSTANCE; static { - defaultInstance = new ColumnPaginationFilter(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPaginationFilter(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPaginationFilter getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ColumnPaginationFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ColumnPaginationFilter(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPaginationFilter getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ColumnPaginationFilter) } - public interface ColumnPrefixFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ColumnPrefixFilterOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ColumnPrefixFilter) + com.google.protobuf.MessageOrBuilder { - // required bytes prefix = 1; /** * required bytes prefix = 1; */ @@ -1701,36 +1794,28 @@ public final class FilterProtos { /** * 
Protobuf type {@code hbase.pb.ColumnPrefixFilter} */ - public static final class ColumnPrefixFilter extends - com.google.protobuf.GeneratedMessage - implements ColumnPrefixFilterOrBuilder { + public static final class ColumnPrefixFilter extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ColumnPrefixFilter) + ColumnPrefixFilterOrBuilder { // Use ColumnPrefixFilter.newBuilder() to construct. - private ColumnPrefixFilter(com.google.protobuf.GeneratedMessage.Builder builder) { + private ColumnPrefixFilter(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ColumnPrefixFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ColumnPrefixFilter defaultInstance; - public static ColumnPrefixFilter getDefaultInstance() { - return defaultInstance; } - - public ColumnPrefixFilter getDefaultInstanceForType() { - return defaultInstance; + private ColumnPrefixFilter() { + prefix_ = com.google.protobuf.ByteString.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ColumnPrefixFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -1760,7 +1845,7 @@ public final class FilterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { 
this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -1771,30 +1856,14 @@ public final class FilterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPrefixFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPrefixFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPrefixFilter.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPrefixFilter.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ColumnPrefixFilter parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ColumnPrefixFilter(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bytes prefix = 1; public static final int PREFIX_FIELD_NUMBER = 1; private com.google.protobuf.ByteString prefix_; /** @@ -1810,13 +1879,11 @@ public final class FilterProtos { return prefix_; } - private void initFields() { - prefix_ = com.google.protobuf.ByteString.EMPTY; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasPrefix()) { memoizedIsInitialized = 0; @@ -1828,16 +1895,14 @@ public final class FilterProtos { public void 
writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, prefix_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -1845,19 +1910,13 @@ public final class FilterProtos { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, prefix_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -1873,12 +1932,10 @@ public final class FilterProtos { result = result && getPrefix() .equals(other.getPrefix()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -1890,7 +1947,7 @@ public final class FilterProtos { hash = (37 * hash) + PREFIX_FIELD_NUMBER; hash = (53 * hash) + getPrefix().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -1918,46 +1975,57 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPrefixFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPrefixFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder 
newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPrefixFilter prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -1965,14 +2033,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.ColumnPrefixFilter} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPrefixFilterOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ColumnPrefixFilter) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPrefixFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPrefixFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPrefixFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -1985,18 +2054,15 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent 
parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); prefix_ = com.google.protobuf.ByteString.EMPTY; @@ -2004,10 +2070,6 @@ public final class FilterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnPrefixFilter_descriptor; @@ -2038,6 +2100,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPrefixFilter) { return 
mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPrefixFilter)other); @@ -2052,13 +2140,13 @@ public final class FilterProtos { if (other.hasPrefix()) { setPrefix(other.getPrefix()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasPrefix()) { - return false; } return true; @@ -2073,7 +2161,7 @@ public final class FilterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPrefixFilter) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -2083,7 +2171,6 @@ public final class FilterProtos { } private int bitField0_; - // required bytes prefix = 1; private com.google.protobuf.ByteString prefix_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes prefix = 1; @@ -2118,32 +2205,68 @@ public final class FilterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ColumnPrefixFilter) } + // @@protoc_insertion_point(class_scope:hbase.pb.ColumnPrefixFilter) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPrefixFilter DEFAULT_INSTANCE; static { - defaultInstance = new ColumnPrefixFilter(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPrefixFilter(); } - // 
@@protoc_insertion_point(class_scope:hbase.pb.ColumnPrefixFilter) - } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPrefixFilter getDefaultInstance() { + return DEFAULT_INSTANCE; + } - public interface ColumnRangeFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ColumnPrefixFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ColumnPrefixFilter(input, extensionRegistry); + } + }; - // optional bytes min_column = 1; - /** - * optional bytes min_column = 1; - */ - boolean hasMinColumn(); - /** - * optional bytes min_column = 1; - */ + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPrefixFilter getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface ColumnRangeFilterOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ColumnRangeFilter) + com.google.protobuf.MessageOrBuilder { + + /** + * optional bytes min_column = 1; + */ + boolean hasMinColumn(); + /** + * optional bytes min_column = 1; + */ com.google.protobuf.ByteString getMinColumn(); - // optional bool min_column_inclusive = 2; /** * optional bool min_column_inclusive = 2; */ @@ -2153,7 +2276,6 @@ public final class FilterProtos { */ boolean getMinColumnInclusive(); - // optional bytes max_column = 3; /** * optional bytes max_column = 3; */ @@ -2163,7 +2285,6 @@ public final class FilterProtos { */ com.google.protobuf.ByteString getMaxColumn(); - // optional bool max_column_inclusive = 4; /** * optional bool 
max_column_inclusive = 4; */ @@ -2176,36 +2297,31 @@ public final class FilterProtos { /** * Protobuf type {@code hbase.pb.ColumnRangeFilter} */ - public static final class ColumnRangeFilter extends - com.google.protobuf.GeneratedMessage - implements ColumnRangeFilterOrBuilder { + public static final class ColumnRangeFilter extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ColumnRangeFilter) + ColumnRangeFilterOrBuilder { // Use ColumnRangeFilter.newBuilder() to construct. - private ColumnRangeFilter(com.google.protobuf.GeneratedMessage.Builder builder) { + private ColumnRangeFilter(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private ColumnRangeFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ColumnRangeFilter defaultInstance; - public static ColumnRangeFilter getDefaultInstance() { - return defaultInstance; - } - - public ColumnRangeFilter getDefaultInstanceForType() { - return defaultInstance; + private ColumnRangeFilter() { + minColumn_ = com.google.protobuf.ByteString.EMPTY; + minColumnInclusive_ = false; + maxColumn_ = com.google.protobuf.ByteString.EMPTY; + maxColumnInclusive_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ColumnRangeFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -2250,7 +2366,7 @@ public final class FilterProtos { throw 
e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -2261,30 +2377,14 @@ public final class FilterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnRangeFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnRangeFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnRangeFilter.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnRangeFilter.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ColumnRangeFilter parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ColumnRangeFilter(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional bytes min_column = 1; public static final int MIN_COLUMN_FIELD_NUMBER = 1; private com.google.protobuf.ByteString minColumn_; /** @@ -2300,7 +2400,6 @@ public final class FilterProtos { return minColumn_; } - // optional bool min_column_inclusive = 2; public static final int MIN_COLUMN_INCLUSIVE_FIELD_NUMBER = 2; private boolean minColumnInclusive_; /** @@ -2316,7 +2415,6 @@ public final class FilterProtos { return minColumnInclusive_; } - // optional bytes max_column = 3; 
public static final int MAX_COLUMN_FIELD_NUMBER = 3; private com.google.protobuf.ByteString maxColumn_; /** @@ -2332,7 +2430,6 @@ public final class FilterProtos { return maxColumn_; } - // optional bool max_column_inclusive = 4; public static final int MAX_COLUMN_INCLUSIVE_FIELD_NUMBER = 4; private boolean maxColumnInclusive_; /** @@ -2348,16 +2445,11 @@ public final class FilterProtos { return maxColumnInclusive_; } - private void initFields() { - minColumn_ = com.google.protobuf.ByteString.EMPTY; - minColumnInclusive_ = false; - maxColumn_ = com.google.protobuf.ByteString.EMPTY; - maxColumnInclusive_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -2365,7 +2457,6 @@ public final class FilterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, minColumn_); } @@ -2378,12 +2469,11 @@ public final class FilterProtos { if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeBool(4, maxColumnInclusive_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -2403,19 +2493,13 @@ public final class FilterProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(4, maxColumnInclusive_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object 
writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -2446,12 +2530,10 @@ public final class FilterProtos { result = result && (getMaxColumnInclusive() == other.getMaxColumnInclusive()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -2465,7 +2547,8 @@ public final class FilterProtos { } if (hasMinColumnInclusive()) { hash = (37 * hash) + MIN_COLUMN_INCLUSIVE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getMinColumnInclusive()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getMinColumnInclusive()); } if (hasMaxColumn()) { hash = (37 * hash) + MAX_COLUMN_FIELD_NUMBER; @@ -2473,9 +2556,10 @@ public final class FilterProtos { } if (hasMaxColumnInclusive()) { hash = (37 * hash) + MAX_COLUMN_INCLUSIVE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getMaxColumnInclusive()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getMaxColumnInclusive()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -2503,46 +2587,57 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return 
PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnRangeFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnRangeFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnRangeFilter prototype) { - return newBuilder().mergeFrom(prototype); + return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -2550,14 +2645,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.ColumnRangeFilter} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnRangeFilterOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ColumnRangeFilter) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnRangeFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnRangeFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnRangeFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -2570,18 +2666,15 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + 
.alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); minColumn_ = com.google.protobuf.ByteString.EMPTY; @@ -2595,10 +2688,6 @@ public final class FilterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_ColumnRangeFilter_descriptor; @@ -2641,6 +2730,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnRangeFilter) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnRangeFilter)other); @@ -2664,7 +2779,8 @@ public final class FilterProtos { if (other.hasMaxColumnInclusive()) { setMaxColumnInclusive(other.getMaxColumnInclusive()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + 
onChanged(); return this; } @@ -2681,7 +2797,7 @@ public final class FilterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnRangeFilter) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -2691,7 +2807,6 @@ public final class FilterProtos { } private int bitField0_; - // optional bytes min_column = 1; private com.google.protobuf.ByteString minColumn_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes min_column = 1; @@ -2727,7 +2842,6 @@ public final class FilterProtos { return this; } - // optional bool min_column_inclusive = 2; private boolean minColumnInclusive_ ; /** * optional bool min_column_inclusive = 2; @@ -2760,7 +2874,6 @@ public final class FilterProtos { return this; } - // optional bytes max_column = 3; private com.google.protobuf.ByteString maxColumn_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes max_column = 3; @@ -2796,7 +2909,6 @@ public final class FilterProtos { return this; } - // optional bool max_column_inclusive = 4; private boolean maxColumnInclusive_ ; /** * optional bool max_column_inclusive = 4; @@ -2828,22 +2940,59 @@ public final class FilterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ColumnRangeFilter) } + // @@protoc_insertion_point(class_scope:hbase.pb.ColumnRangeFilter) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnRangeFilter DEFAULT_INSTANCE; 
static { - defaultInstance = new ColumnRangeFilter(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnRangeFilter(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnRangeFilter getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ColumnRangeFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ColumnRangeFilter(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnRangeFilter getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ColumnRangeFilter) } - public interface CompareFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface CompareFilterOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.CompareFilter) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.CompareType compare_op = 1; /** * required .hbase.pb.CompareType compare_op = 1; */ @@ -2853,7 +3002,6 @@ public final class FilterProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType getCompareOp(); - // optional .hbase.pb.Comparator comparator = 2; /** * optional .hbase.pb.Comparator comparator = 2; */ @@ -2870,36 +3018,28 @@ public final class FilterProtos { /** * Protobuf type {@code hbase.pb.CompareFilter} */ - public static final class CompareFilter extends - com.google.protobuf.GeneratedMessage - implements 
CompareFilterOrBuilder { + public static final class CompareFilter extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.CompareFilter) + CompareFilterOrBuilder { // Use CompareFilter.newBuilder() to construct. - private CompareFilter(com.google.protobuf.GeneratedMessage.Builder builder) { + private CompareFilter(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private CompareFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final CompareFilter defaultInstance; - public static CompareFilter getDefaultInstance() { - return defaultInstance; - } - - public CompareFilter getDefaultInstanceForType() { - return defaultInstance; + private CompareFilter() { + compareOp_ = 0; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private CompareFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -2925,7 +3065,7 @@ public final class FilterProtos { unknownFields.mergeVarintField(1, rawValue); } else { bitField0_ |= 0x00000001; - compareOp_ = value; + compareOp_ = rawValue; } break; } @@ -2948,7 +3088,7 @@ public final class FilterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); 
makeExtensionsImmutable(); @@ -2959,32 +3099,16 @@ public final class FilterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_CompareFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_CompareFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CompareFilter parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CompareFilter(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.CompareType compare_op = 1; public static final int COMPARE_OP_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType compareOp_; + private int compareOp_; /** * required .hbase.pb.CompareType compare_op = 1; */ @@ -2995,10 +3119,10 @@ public final class FilterProtos { * required .hbase.pb.CompareType compare_op = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType getCompareOp() { - return compareOp_; + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType result = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType.valueOf(compareOp_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType.LESS : result; } - // optional .hbase.pb.Comparator comparator = 2; public static final int COMPARATOR_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator comparator_; /** @@ -3011,23 +3135,20 @@ public final class FilterProtos { * optional .hbase.pb.Comparator comparator = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator getComparator() { - return comparator_; + return comparator_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance() : comparator_; } /** * optional .hbase.pb.Comparator comparator = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() { - return comparator_; + return comparator_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance() : comparator_; } - private void initFields() { - compareOp_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType.LESS; - comparator_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasCompareOp()) { memoizedIsInitialized = 0; @@ -3045,43 +3166,35 @@ public final class FilterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeEnum(1, compareOp_.getNumber()); + output.writeEnum(1, compareOp_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, comparator_); + output.writeMessage(2, 
getComparator()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(1, compareOp_.getNumber()); + .computeEnumSize(1, compareOp_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, comparator_); + .computeMessageSize(2, getComparator()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -3094,20 +3207,17 @@ public final class FilterProtos { boolean result = true; result = result && (hasCompareOp() == other.hasCompareOp()); if (hasCompareOp()) { - result = result && - (getCompareOp() == other.getCompareOp()); + result = result && compareOp_ == other.compareOp_; } result = result && (hasComparator() == other.hasComparator()); if (hasComparator()) { result = result && getComparator() .equals(other.getComparator()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -3117,13 +3227,13 @@ public final class FilterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasCompareOp()) { hash = (37 * hash) + COMPARE_OP_FIELD_NUMBER; - hash = (53 * 
hash) + hashEnum(getCompareOp()); + hash = (53 * hash) + compareOp_; } if (hasComparator()) { hash = (37 * hash) + COMPARATOR_FIELD_NUMBER; hash = (53 * hash) + getComparator().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -3151,46 +3261,57 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -3198,14 +3319,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.CompareFilter} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilterOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.CompareFilter) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_CompareFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_CompareFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -3218,25 +3340,22 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getComparatorFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); - compareOp_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType.LESS; + compareOp_ = 0; bitField0_ = (bitField0_ & ~0x00000001); if (comparatorBuilder_ == null) { - comparator_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); + comparator_ = null; } else { comparatorBuilder_.clear(); } @@ -3244,10 +3363,6 @@ public final class FilterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_CompareFilter_descriptor; @@ -3286,6 +3401,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return 
(Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter)other); @@ -3303,18 +3444,17 @@ public final class FilterProtos { if (other.hasComparator()) { mergeComparator(other.getComparator()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasCompareOp()) { - return false; } if (hasComparator()) { if (!getComparator().isInitialized()) { - return false; } } @@ -3330,7 +3470,7 @@ public final class FilterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -3340,8 +3480,7 @@ public final class FilterProtos { } private int bitField0_; - // required .hbase.pb.CompareType compare_op = 1; - private 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType compareOp_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType.LESS; + private int compareOp_ = 0; /** * required .hbase.pb.CompareType compare_op = 1; */ @@ -3352,7 +3491,8 @@ public final class FilterProtos { * required .hbase.pb.CompareType compare_op = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType getCompareOp() { - return compareOp_; + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType result = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType.valueOf(compareOp_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType.LESS : result; } /** * required .hbase.pb.CompareType compare_op = 1; @@ -3362,7 +3502,7 @@ public final class FilterProtos { throw new NullPointerException(); } bitField0_ |= 0x00000001; - compareOp_ = value; + compareOp_ = value.getNumber(); onChanged(); return this; } @@ -3371,14 +3511,13 @@ public final class FilterProtos { */ public Builder clearCompareOp() { bitField0_ = (bitField0_ & ~0x00000001); - compareOp_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType.LESS; + compareOp_ = 0; onChanged(); return this; } - // optional .hbase.pb.Comparator comparator = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator comparator_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator comparator_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> comparatorBuilder_; /** * optional .hbase.pb.Comparator comparator = 2; @@ -3391,7 +3530,7 @@ public final class FilterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator getComparator() { if (comparatorBuilder_ == null) { - return comparator_; + return comparator_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance() : comparator_; } else { return comparatorBuilder_.getMessage(); } @@ -3432,6 +3571,7 @@ public final class FilterProtos { public Builder mergeComparator(org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator value) { if (comparatorBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + comparator_ != null && comparator_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance()) { comparator_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.newBuilder(comparator_).mergeFrom(value).buildPartial(); @@ -3450,7 +3590,7 @@ public final class FilterProtos { */ public Builder clearComparator() { if (comparatorBuilder_ == null) { - comparator_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); + comparator_ = null; onChanged(); } else { comparatorBuilder_.clear(); @@ -3473,41 +3613,79 @@ public final class FilterProtos { if (comparatorBuilder_ != null) { return comparatorBuilder_.getMessageOrBuilder(); } else { - return comparator_; + return comparator_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance() : comparator_; } } /** * optional .hbase.pb.Comparator comparator = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> getComparatorFieldBuilder() { if (comparatorBuilder_ == null) { - comparatorBuilder_ = new com.google.protobuf.SingleFieldBuilder< + comparatorBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>( - comparator_, + getComparator(), getParentForChildren(), isClean()); comparator_ = null; } return comparatorBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.CompareFilter) } + // @@protoc_insertion_point(class_scope:hbase.pb.CompareFilter) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter DEFAULT_INSTANCE; static { - defaultInstance = new CompareFilter(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter getDefaultInstance() { + return 
DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public CompareFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CompareFilter(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.CompareFilter) } - public interface DependentColumnFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface DependentColumnFilterOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.DependentColumnFilter) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.CompareFilter compare_filter = 1; /** * required .hbase.pb.CompareFilter compare_filter = 1; */ @@ -3521,7 +3699,6 @@ public final class FilterProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder(); - // optional bytes column_family = 2; /** * optional bytes column_family = 2; */ @@ -3531,7 +3708,6 @@ public final class FilterProtos { */ com.google.protobuf.ByteString getColumnFamily(); - // optional bytes column_qualifier = 3; /** * optional bytes column_qualifier = 3; */ @@ -3541,7 +3717,6 @@ public final class FilterProtos { */ com.google.protobuf.ByteString getColumnQualifier(); - // optional bool drop_dependent_column = 4; /** * optional bool drop_dependent_column = 4; */ @@ -3554,36 +3729,30 @@ public final class FilterProtos { /** * Protobuf type {@code hbase.pb.DependentColumnFilter} */ - 
public static final class DependentColumnFilter extends - com.google.protobuf.GeneratedMessage - implements DependentColumnFilterOrBuilder { + public static final class DependentColumnFilter extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.DependentColumnFilter) + DependentColumnFilterOrBuilder { // Use DependentColumnFilter.newBuilder() to construct. - private DependentColumnFilter(com.google.protobuf.GeneratedMessage.Builder builder) { + private DependentColumnFilter(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private DependentColumnFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final DependentColumnFilter defaultInstance; - public static DependentColumnFilter getDefaultInstance() { - return defaultInstance; } - - public DependentColumnFilter getDefaultInstanceForType() { - return defaultInstance; + private DependentColumnFilter() { + columnFamily_ = com.google.protobuf.ByteString.EMPTY; + columnQualifier_ = com.google.protobuf.ByteString.EMPTY; + dropDependentColumn_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private DependentColumnFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -3636,7 +3805,7 @@ public final class FilterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - 
e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -3647,30 +3816,14 @@ public final class FilterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_DependentColumnFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_DependentColumnFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.DependentColumnFilter.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.DependentColumnFilter.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public DependentColumnFilter parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new DependentColumnFilter(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.CompareFilter compare_filter = 1; public static final int COMPARE_FILTER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter compareFilter_; /** @@ -3683,16 +3836,15 @@ public final class FilterProtos { * required .hbase.pb.CompareFilter compare_filter = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { - return compareFilter_; + return compareFilter_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance() : compareFilter_; } /** * required .hbase.pb.CompareFilter compare_filter = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { - return compareFilter_; + return compareFilter_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance() : compareFilter_; } - // optional bytes column_family = 2; public static final int COLUMN_FAMILY_FIELD_NUMBER = 2; private com.google.protobuf.ByteString columnFamily_; /** @@ -3708,7 +3860,6 @@ public final class FilterProtos { return columnFamily_; } - // optional bytes column_qualifier = 3; public static final int COLUMN_QUALIFIER_FIELD_NUMBER = 3; private com.google.protobuf.ByteString columnQualifier_; /** @@ -3724,7 +3875,6 @@ public final class FilterProtos { return columnQualifier_; } - // optional bool drop_dependent_column = 4; public static final int DROP_DEPENDENT_COLUMN_FIELD_NUMBER = 4; private boolean dropDependentColumn_; /** @@ -3740,16 +3890,11 @@ public final class FilterProtos { return dropDependentColumn_; } - private void initFields() { - compareFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - columnFamily_ = com.google.protobuf.ByteString.EMPTY; - columnQualifier_ = com.google.protobuf.ByteString.EMPTY; - dropDependentColumn_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasCompareFilter()) { memoizedIsInitialized = 0; @@ -3765,9 +3910,8 @@ public final class FilterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ 
& 0x00000001) == 0x00000001)) { - output.writeMessage(1, compareFilter_); + output.writeMessage(1, getCompareFilter()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, columnFamily_); @@ -3778,18 +3922,17 @@ public final class FilterProtos { if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeBool(4, dropDependentColumn_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, compareFilter_); + .computeMessageSize(1, getCompareFilter()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream @@ -3803,19 +3946,13 @@ public final class FilterProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(4, dropDependentColumn_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -3846,12 +3983,10 @@ public final class FilterProtos { result = result && (getDropDependentColumn() == other.getDropDependentColumn()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -3873,9 +4008,10 @@ public final class FilterProtos { } if (hasDropDependentColumn()) 
{ hash = (37 * hash) + DROP_DEPENDENT_COLUMN_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getDropDependentColumn()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getDropDependentColumn()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -3903,46 +4039,57 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.DependentColumnFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.DependentColumnFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( com.google.protobuf.CodedInputStream input) throws 
java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.DependentColumnFilter prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -3950,14 +4097,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.DependentColumnFilter} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.DependentColumnFilterOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.DependentColumnFilter) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.DependentColumnFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_DependentColumnFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_DependentColumnFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -3970,23 +4118,20 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getCompareFilterFieldBuilder(); } } - private static Builder create() { - return new 
Builder(); - } - public Builder clear() { super.clear(); if (compareFilterBuilder_ == null) { - compareFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + compareFilter_ = null; } else { compareFilterBuilder_.clear(); } @@ -4000,10 +4145,6 @@ public final class FilterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_DependentColumnFilter_descriptor; @@ -4050,6 +4191,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.DependentColumnFilter) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.DependentColumnFilter)other); @@ -4073,17 +4240,16 @@ public final class FilterProtos { if (other.hasDropDependentColumn()) { setDropDependentColumn(other.getDropDependentColumn()); } - 
this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasCompareFilter()) { - return false; } if (!getCompareFilter().isInitialized()) { - return false; } return true; @@ -4098,7 +4264,7 @@ public final class FilterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.DependentColumnFilter) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -4108,9 +4274,8 @@ public final class FilterProtos { } private int bitField0_; - // required .hbase.pb.CompareFilter compare_filter = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_; /** * required .hbase.pb.CompareFilter compare_filter = 1; @@ -4123,7 +4288,7 @@ public final class FilterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { if (compareFilterBuilder_ == null) { - return compareFilter_; + return compareFilter_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance() : compareFilter_; } else { return compareFilterBuilder_.getMessage(); } @@ -4164,6 +4329,7 @@ public final class FilterProtos { public Builder mergeCompareFilter(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter value) { if (compareFilterBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + compareFilter_ != null && compareFilter_ != org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) { compareFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial(); @@ -4182,7 +4348,7 @@ public final class FilterProtos { */ public Builder clearCompareFilter() { if (compareFilterBuilder_ == null) { - compareFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + compareFilter_ = null; onChanged(); } else { compareFilterBuilder_.clear(); @@ -4205,19 +4371,20 @@ public final class FilterProtos { if (compareFilterBuilder_ != null) { return compareFilterBuilder_.getMessageOrBuilder(); } else { - return compareFilter_; + return compareFilter_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance() : compareFilter_; } } /** * required .hbase.pb.CompareFilter compare_filter = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilterOrBuilder> getCompareFilterFieldBuilder() { if (compareFilterBuilder_ == null) { - compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilterOrBuilder>( - compareFilter_, + getCompareFilter(), getParentForChildren(), isClean()); compareFilter_ = null; @@ -4225,7 +4392,6 @@ public final class FilterProtos { return compareFilterBuilder_; } - // optional bytes column_family = 2; private com.google.protobuf.ByteString columnFamily_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes column_family = 2; @@ -4261,7 +4427,6 @@ public final class FilterProtos { return this; } - // optional bytes column_qualifier = 3; private com.google.protobuf.ByteString columnQualifier_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes column_qualifier = 3; @@ -4297,7 +4462,6 @@ public final class FilterProtos { return this; } - // optional bool drop_dependent_column = 4; private boolean dropDependentColumn_ ; /** * optional bool drop_dependent_column = 4; @@ -4329,22 +4493,59 @@ public final class FilterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet 
unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.DependentColumnFilter) } + // @@protoc_insertion_point(class_scope:hbase.pb.DependentColumnFilter) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.DependentColumnFilter DEFAULT_INSTANCE; static { - defaultInstance = new DependentColumnFilter(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.DependentColumnFilter(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.DependentColumnFilter getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public DependentColumnFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DependentColumnFilter(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.DependentColumnFilter getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.DependentColumnFilter) } - public interface FamilyFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface FamilyFilterOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.FamilyFilter) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.CompareFilter 
compare_filter = 1; /** * required .hbase.pb.CompareFilter compare_filter = 1; */ @@ -4361,36 +4562,27 @@ public final class FilterProtos { /** * Protobuf type {@code hbase.pb.FamilyFilter} */ - public static final class FamilyFilter extends - com.google.protobuf.GeneratedMessage - implements FamilyFilterOrBuilder { + public static final class FamilyFilter extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.FamilyFilter) + FamilyFilterOrBuilder { // Use FamilyFilter.newBuilder() to construct. - private FamilyFilter(com.google.protobuf.GeneratedMessage.Builder builder) { + private FamilyFilter(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private FamilyFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final FamilyFilter defaultInstance; - public static FamilyFilter getDefaultInstance() { - return defaultInstance; } - - public FamilyFilter getDefaultInstanceForType() { - return defaultInstance; + private FamilyFilter() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private FamilyFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -4428,7 +4620,7 @@ public final class FilterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally 
{ this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -4439,30 +4631,14 @@ public final class FilterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FamilyFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FamilyFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FamilyFilter.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FamilyFilter.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public FamilyFilter parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new FamilyFilter(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.CompareFilter compare_filter = 1; public static final int COMPARE_FILTER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter compareFilter_; /** @@ -4475,22 +4651,20 @@ public final class FilterProtos { * required .hbase.pb.CompareFilter compare_filter = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { - return compareFilter_; + return compareFilter_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance() : compareFilter_; } /** * required .hbase.pb.CompareFilter compare_filter = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { - return compareFilter_; + return compareFilter_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance() : compareFilter_; } - private void initFields() { - compareFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasCompareFilter()) { memoizedIsInitialized = 0; @@ -4506,36 +4680,28 @@ public final class FilterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, compareFilter_); + output.writeMessage(1, getCompareFilter()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, compareFilter_); + .computeMessageSize(1, getCompareFilter()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws 
java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -4551,12 +4717,10 @@ public final class FilterProtos { result = result && getCompareFilter() .equals(other.getCompareFilter()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -4568,7 +4732,7 @@ public final class FilterProtos { hash = (37 * hash) + COMPARE_FILTER_FIELD_NUMBER; hash = (53 * hash) + getCompareFilter().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -4596,46 +4760,57 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FamilyFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FamilyFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FamilyFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FamilyFilter 
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FamilyFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FamilyFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FamilyFilter prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -4643,14 +4818,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.FamilyFilter} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FamilyFilterOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.FamilyFilter) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FamilyFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FamilyFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FamilyFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -4663,23 +4839,20 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getCompareFilterFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if 
(compareFilterBuilder_ == null) { - compareFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + compareFilter_ = null; } else { compareFilterBuilder_.clear(); } @@ -4687,10 +4860,6 @@ public final class FilterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FamilyFilter_descriptor; @@ -4725,6 +4894,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FamilyFilter) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FamilyFilter)other); @@ -4739,17 +4934,16 @@ public final class FilterProtos { if (other.hasCompareFilter()) { mergeCompareFilter(other.getCompareFilter()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + 
onChanged(); return this; } public final boolean isInitialized() { if (!hasCompareFilter()) { - return false; } if (!getCompareFilter().isInitialized()) { - return false; } return true; @@ -4764,7 +4958,7 @@ public final class FilterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FamilyFilter) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -4774,9 +4968,8 @@ public final class FilterProtos { } private int bitField0_; - // required .hbase.pb.CompareFilter compare_filter = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_; /** * required .hbase.pb.CompareFilter compare_filter = 1; @@ -4789,7 +4982,7 @@ public final class FilterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { if (compareFilterBuilder_ == null) { - return compareFilter_; + return compareFilter_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance() : compareFilter_; } else { return compareFilterBuilder_.getMessage(); } @@ -4830,6 +5023,7 @@ public final class FilterProtos { public Builder mergeCompareFilter(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter value) { if (compareFilterBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + compareFilter_ != null && compareFilter_ != org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) { compareFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial(); @@ -4848,7 +5042,7 @@ public final class FilterProtos { */ public Builder clearCompareFilter() { if (compareFilterBuilder_ == null) { - compareFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + compareFilter_ = null; onChanged(); } else { compareFilterBuilder_.clear(); @@ -4871,41 +5065,79 @@ public final class FilterProtos { if (compareFilterBuilder_ != null) { return compareFilterBuilder_.getMessageOrBuilder(); } else { - return compareFilter_; + return compareFilter_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance() : compareFilter_; } } /** * required .hbase.pb.CompareFilter compare_filter = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilterOrBuilder> getCompareFilterFieldBuilder() { if (compareFilterBuilder_ == null) { - compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilterOrBuilder>( - compareFilter_, + getCompareFilter(), getParentForChildren(), isClean()); compareFilter_ = null; } return compareFilterBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.FamilyFilter) } + // @@protoc_insertion_point(class_scope:hbase.pb.FamilyFilter) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FamilyFilter DEFAULT_INSTANCE; static { - defaultInstance = new FamilyFilter(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FamilyFilter(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FamilyFilter 
getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public FamilyFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FamilyFilter(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FamilyFilter getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.FamilyFilter) } - public interface FilterListOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface FilterListOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.FilterList) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.FilterList.Operator operator = 1; /** * required .hbase.pb.FilterList.Operator operator = 1; */ @@ -4915,7 +5147,6 @@ public final class FilterProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList.Operator getOperator(); - // repeated .hbase.pb.Filter filters = 2; /** * repeated .hbase.pb.Filter filters = 2; */ @@ -4943,36 +5174,29 @@ public final class FilterProtos { /** * Protobuf type {@code hbase.pb.FilterList} */ - public static final class FilterList extends - com.google.protobuf.GeneratedMessage - implements FilterListOrBuilder { + public static final class FilterList extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.FilterList) + FilterListOrBuilder { // Use FilterList.newBuilder() to construct. 
- private FilterList(com.google.protobuf.GeneratedMessage.Builder builder) { + private FilterList(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private FilterList(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final FilterList defaultInstance; - public static FilterList getDefaultInstance() { - return defaultInstance; - } - - public FilterList getDefaultInstanceForType() { - return defaultInstance; + private FilterList() { + operator_ = 1; + filters_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private FilterList( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -4998,7 +5222,7 @@ public final class FilterProtos { unknownFields.mergeVarintField(1, rawValue); } else { bitField0_ |= 0x00000001; - operator_ = value; + operator_ = rawValue; } break; } @@ -5007,7 +5231,8 @@ public final class FilterProtos { filters_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } - filters_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.PARSER, extensionRegistry)); + filters_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.PARSER, extensionRegistry)); break; } } @@ -5016,7 +5241,7 @@ public final class FilterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - 
e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { filters_ = java.util.Collections.unmodifiableList(filters_); @@ -5030,28 +5255,13 @@ public final class FilterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterList_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterList_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public FilterList parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new FilterList(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - /** * Protobuf enum {@code hbase.pb.FilterList.Operator} */ @@ -5060,11 +5270,11 @@ public final class FilterProtos { /** * MUST_PASS_ALL = 1; */ - MUST_PASS_ALL(0, 1), + MUST_PASS_ALL(1), /** * MUST_PASS_ONE = 2; */ - MUST_PASS_ONE(1, 2), + MUST_PASS_ONE(2), ; /** @@ -5077,9 +5287,19 @@ public final class FilterProtos { public static final int MUST_PASS_ONE_VALUE = 2; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. 
+ */ + @java.lang.Deprecated public static Operator valueOf(int value) { + return forNumber(value); + } + + public static Operator forNumber(int value) { switch (value) { case 1: return MUST_PASS_ALL; case 2: return MUST_PASS_ONE; @@ -5091,17 +5311,17 @@ public final class FilterProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + Operator> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public Operator findValueByNumber(int number) { - return Operator.valueOf(number); + return Operator.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -5123,11 +5343,9 @@ public final class FilterProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private Operator(int index, int value) { - this.index = index; + private Operator(int value) { this.value = value; } @@ -5135,9 +5353,8 @@ public final class FilterProtos { } private int bitField0_; - // required .hbase.pb.FilterList.Operator operator = 1; public static final int OPERATOR_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList.Operator operator_; + private int operator_; /** * required .hbase.pb.FilterList.Operator operator = 1; */ @@ -5148,10 +5365,10 @@ public final class FilterProtos { * required .hbase.pb.FilterList.Operator operator = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList.Operator getOperator() { - return operator_; + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList.Operator result = 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList.Operator.valueOf(operator_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL : result; } - // repeated .hbase.pb.Filter filters = 2; public static final int FILTERS_FIELD_NUMBER = 2; private java.util.List filters_; /** @@ -5187,14 +5404,11 @@ public final class FilterProtos { return filters_.get(index); } - private void initFields() { - operator_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL; - filters_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasOperator()) { memoizedIsInitialized = 0; @@ -5212,43 +5426,35 @@ public final class FilterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeEnum(1, operator_.getNumber()); + output.writeEnum(1, operator_); } for (int i = 0; i < filters_.size(); i++) { output.writeMessage(2, filters_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(1, operator_.getNumber()); + .computeEnumSize(1, operator_); } for (int i = 0; i < filters_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, filters_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + 
size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -5261,17 +5467,14 @@ public final class FilterProtos { boolean result = true; result = result && (hasOperator() == other.hasOperator()); if (hasOperator()) { - result = result && - (getOperator() == other.getOperator()); + result = result && operator_ == other.operator_; } result = result && getFiltersList() .equals(other.getFiltersList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -5281,13 +5484,13 @@ public final class FilterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasOperator()) { hash = (37 * hash) + OPERATOR_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getOperator()); + hash = (53 * hash) + operator_; } if (getFiltersCount() > 0) { hash = (37 * hash) + FILTERS_FIELD_NUMBER; hash = (53 * hash) + getFiltersList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -5315,46 +5518,57 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList parseFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList prototype) { - return 
newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -5362,14 +5576,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.FilterList} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterListOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.FilterList) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterListOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterList_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterList_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -5382,22 +5597,19 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + 
.alwaysUseFieldBuilders) { getFiltersFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); - operator_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL; + operator_ = 1; bitField0_ = (bitField0_ & ~0x00000001); if (filtersBuilder_ == null) { filters_ = java.util.Collections.emptyList(); @@ -5408,10 +5620,6 @@ public final class FilterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterList_descriptor; @@ -5451,6 +5659,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList)other); @@ -5484,25 +5718,24 @@ public final class FilterProtos { 
filters_ = other.filters_; bitField0_ = (bitField0_ & ~0x00000002); filtersBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getFiltersFieldBuilder() : null; } else { filtersBuilder_.addAllMessages(other.filters_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasOperator()) { - return false; } for (int i = 0; i < getFiltersCount(); i++) { if (!getFilters(i).isInitialized()) { - return false; } } @@ -5518,7 +5751,7 @@ public final class FilterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -5528,8 +5761,7 @@ public final class FilterProtos { } private int bitField0_; - // required .hbase.pb.FilterList.Operator operator = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList.Operator operator_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL; + private int operator_ = 1; /** * required .hbase.pb.FilterList.Operator operator = 1; */ @@ -5540,7 +5772,8 @@ public final class FilterProtos { * required .hbase.pb.FilterList.Operator operator = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList.Operator getOperator() { - return operator_; + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList.Operator result = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList.Operator.valueOf(operator_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL : result; } /** * required .hbase.pb.FilterList.Operator operator = 1; @@ -5550,7 +5783,7 @@ public final class FilterProtos { throw new NullPointerException(); } bitField0_ |= 0x00000001; - operator_ = value; + operator_ = value.getNumber(); onChanged(); return this; } @@ -5559,12 +5792,11 @@ public final class FilterProtos { */ public Builder clearOperator() { bitField0_ = (bitField0_ & ~0x00000001); - operator_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL; + operator_ = 1; onChanged(); return this; } - // repeated .hbase.pb.Filter filters = 2; private java.util.List filters_ = java.util.Collections.emptyList(); private void ensureFiltersIsMutable() { @@ -5574,7 +5806,7 @@ public final class FilterProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder> filtersBuilder_; /** @@ -5706,7 +5938,8 @@ public final class FilterProtos { java.lang.Iterable values) { if (filtersBuilder_ == null) { ensureFiltersIsMutable(); - super.addAll(values, filters_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, filters_); onChanged(); } else { filtersBuilder_.addAllMessages(values); @@ -5789,11 +6022,11 @@ public final class FilterProtos { getFiltersBuilderList() { return getFiltersFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder> getFiltersFieldBuilder() { if (filtersBuilder_ == null) { - filtersBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + filtersBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder>( filters_, ((bitField0_ & 0x00000002) == 0x00000002), @@ -5803,22 +6036,59 @@ public final class FilterProtos { } return filtersBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.FilterList) } + // @@protoc_insertion_point(class_scope:hbase.pb.FilterList) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList DEFAULT_INSTANCE; static { - defaultInstance = new FilterList(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public FilterList parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FilterList(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return 
PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.FilterList) } - public interface FilterWrapperOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface FilterWrapperOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.FilterWrapper) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.Filter filter = 1; /** * required .hbase.pb.Filter filter = 1; */ @@ -5835,36 +6105,27 @@ public final class FilterProtos { /** * Protobuf type {@code hbase.pb.FilterWrapper} */ - public static final class FilterWrapper extends - com.google.protobuf.GeneratedMessage - implements FilterWrapperOrBuilder { + public static final class FilterWrapper extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.FilterWrapper) + FilterWrapperOrBuilder { // Use FilterWrapper.newBuilder() to construct. 
- private FilterWrapper(com.google.protobuf.GeneratedMessage.Builder builder) { + private FilterWrapper(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private FilterWrapper(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final FilterWrapper defaultInstance; - public static FilterWrapper getDefaultInstance() { - return defaultInstance; } - - public FilterWrapper getDefaultInstanceForType() { - return defaultInstance; + private FilterWrapper() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private FilterWrapper( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -5902,7 +6163,7 @@ public final class FilterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -5913,30 +6174,14 @@ public final class FilterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterWrapper_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterWrapper_fieldAccessorTable 
.ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterWrapper.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterWrapper.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public FilterWrapper parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new FilterWrapper(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.Filter filter = 1; public static final int FILTER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter filter_; /** @@ -5949,22 +6194,20 @@ public final class FilterProtos { * required .hbase.pb.Filter filter = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter getFilter() { - return filter_; + return filter_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance() : filter_; } /** * required .hbase.pb.Filter filter = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() { - return filter_; + return filter_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance() : filter_; } - private void initFields() { - filter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasFilter()) { memoizedIsInitialized = 0; @@ -5980,36 +6223,28 @@ public final class FilterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, filter_); + output.writeMessage(1, getFilter()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, filter_); + .computeMessageSize(1, getFilter()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -6025,12 +6260,10 @@ public final class FilterProtos { result = result && getFilter() .equals(other.getFilter()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return 
result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -6042,7 +6275,7 @@ public final class FilterProtos { hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -6070,46 +6303,57 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterWrapper parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterWrapper parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterWrapper parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterWrapper parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterWrapper parseFrom( com.google.protobuf.CodedInputStream input) throws 
java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterWrapper parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterWrapper prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -6117,14 +6361,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.FilterWrapper} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterWrapperOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.FilterWrapper) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterWrapperOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterWrapper_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterWrapper_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -6137,23 +6382,20 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getFilterFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if 
(filterBuilder_ == null) { - filter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); + filter_ = null; } else { filterBuilder_.clear(); } @@ -6161,10 +6403,6 @@ public final class FilterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterWrapper_descriptor; @@ -6199,6 +6437,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterWrapper) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterWrapper)other); @@ -6213,17 +6477,16 @@ public final class FilterProtos { if (other.hasFilter()) { mergeFilter(other.getFilter()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean 
isInitialized() { if (!hasFilter()) { - return false; } if (!getFilter().isInitialized()) { - return false; } return true; @@ -6238,7 +6501,7 @@ public final class FilterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterWrapper) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -6248,9 +6511,8 @@ public final class FilterProtos { } private int bitField0_; - // required .hbase.pb.Filter filter = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter filter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter filter_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder> filterBuilder_; /** * required .hbase.pb.Filter filter = 1; @@ -6263,7 +6525,7 @@ public final class FilterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter getFilter() { if (filterBuilder_ == null) { - return filter_; + return filter_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance() : filter_; } else { return filterBuilder_.getMessage(); } @@ -6304,6 +6566,7 @@ public final class FilterProtos { public Builder mergeFilter(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter value) { if (filterBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + filter_ != null && filter_ != org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) { filter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial(); @@ -6322,7 +6585,7 @@ public final class FilterProtos { */ public Builder clearFilter() { if (filterBuilder_ == null) { - filter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); + filter_ = null; onChanged(); } else { filterBuilder_.clear(); @@ -6345,73 +6608,103 @@ public final class FilterProtos { if (filterBuilder_ != null) { return filterBuilder_.getMessageOrBuilder(); } else { - return filter_; + return filter_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance() : filter_; } } /** * required .hbase.pb.Filter filter = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder> getFilterFieldBuilder() { if (filterBuilder_ == null) { - filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + filterBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder>( - filter_, + getFilter(), getParentForChildren(), isClean()); filter_ = null; } return filterBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.FilterWrapper) } + // @@protoc_insertion_point(class_scope:hbase.pb.FilterWrapper) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterWrapper DEFAULT_INSTANCE; static { - defaultInstance = new FilterWrapper(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterWrapper(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterWrapper getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + 
PARSER = new com.google.protobuf.AbstractParser() { + public FilterWrapper parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FilterWrapper(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterWrapper getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.FilterWrapper) } - public interface FirstKeyOnlyFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface FirstKeyOnlyFilterOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.FirstKeyOnlyFilter) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.FirstKeyOnlyFilter} */ - public static final class FirstKeyOnlyFilter extends - com.google.protobuf.GeneratedMessage - implements FirstKeyOnlyFilterOrBuilder { + public static final class FirstKeyOnlyFilter extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.FirstKeyOnlyFilter) + FirstKeyOnlyFilterOrBuilder { // Use FirstKeyOnlyFilter.newBuilder() to construct. 
- private FirstKeyOnlyFilter(com.google.protobuf.GeneratedMessage.Builder builder) { + private FirstKeyOnlyFilter(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private FirstKeyOnlyFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final FirstKeyOnlyFilter defaultInstance; - public static FirstKeyOnlyFilter getDefaultInstance() { - return defaultInstance; } - - public FirstKeyOnlyFilter getDefaultInstanceForType() { - return defaultInstance; + private FirstKeyOnlyFilter() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private FirstKeyOnlyFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -6435,7 +6728,7 @@ public final class FilterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -6446,34 +6739,18 @@ public final class FilterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyOnlyFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyOnlyFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public FirstKeyOnlyFilter parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new FirstKeyOnlyFilter(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -6481,29 +6758,21 @@ public final class FilterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean 
equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -6514,12 +6783,10 @@ public final class FilterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyOnlyFilter other = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyOnlyFilter) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -6527,7 +6794,7 @@ public final class FilterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -6555,46 +6822,57 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyOnlyFilter prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -6602,14 +6880,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.FirstKeyOnlyFilter} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyOnlyFilterOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.FirstKeyOnlyFilter) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyOnlyFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyOnlyFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyOnlyFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -6622,27 +6901,20 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); 
return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyOnlyFilter_descriptor; @@ -6666,6 +6938,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyOnlyFilter) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyOnlyFilter)other); @@ -6677,7 +6975,8 @@ public final class FilterProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyOnlyFilter other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -6694,7 +6993,7 @@ public final class FilterProtos { parsedMessage 
= PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyOnlyFilter) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -6702,22 +7001,59 @@ public final class FilterProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.FirstKeyOnlyFilter) } + // @@protoc_insertion_point(class_scope:hbase.pb.FirstKeyOnlyFilter) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyOnlyFilter DEFAULT_INSTANCE; static { - defaultInstance = new FirstKeyOnlyFilter(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyOnlyFilter(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyOnlyFilter getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public FirstKeyOnlyFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FirstKeyOnlyFilter(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyOnlyFilter getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.FirstKeyOnlyFilter) } - public interface FirstKeyValueMatchingQualifiersFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface FirstKeyValueMatchingQualifiersFilterOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.FirstKeyValueMatchingQualifiersFilter) + com.google.protobuf.MessageOrBuilder { - // repeated bytes qualifiers = 1; /** * repeated bytes qualifiers = 1; */ @@ -6734,36 +7070,28 @@ public final class FilterProtos { /** * Protobuf type {@code hbase.pb.FirstKeyValueMatchingQualifiersFilter} */ - public static final class FirstKeyValueMatchingQualifiersFilter extends - com.google.protobuf.GeneratedMessage - implements FirstKeyValueMatchingQualifiersFilterOrBuilder { + public static final class FirstKeyValueMatchingQualifiersFilter extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.FirstKeyValueMatchingQualifiersFilter) + FirstKeyValueMatchingQualifiersFilterOrBuilder { // Use FirstKeyValueMatchingQualifiersFilter.newBuilder() to construct. 
- private FirstKeyValueMatchingQualifiersFilter(com.google.protobuf.GeneratedMessage.Builder builder) { + private FirstKeyValueMatchingQualifiersFilter(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private FirstKeyValueMatchingQualifiersFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final FirstKeyValueMatchingQualifiersFilter defaultInstance; - public static FirstKeyValueMatchingQualifiersFilter getDefaultInstance() { - return defaultInstance; } - - public FirstKeyValueMatchingQualifiersFilter getDefaultInstanceForType() { - return defaultInstance; + private FirstKeyValueMatchingQualifiersFilter() { + qualifiers_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private FirstKeyValueMatchingQualifiersFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -6796,7 +7124,7 @@ public final class FilterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { qualifiers_ = java.util.Collections.unmodifiableList(qualifiers_); @@ -6810,29 +7138,13 @@ public final class FilterProtos { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public FirstKeyValueMatchingQualifiersFilter parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new FirstKeyValueMatchingQualifiersFilter(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - // repeated bytes qualifiers = 1; public static final int QUALIFIERS_FIELD_NUMBER = 1; private java.util.List qualifiers_; /** @@ -6855,13 +7167,11 @@ public final class FilterProtos { return qualifiers_.get(index); } - private void initFields() { - qualifiers_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -6869,16 +7179,14 @@ public final class FilterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws 
java.io.IOException { - getSerializedSize(); for (int i = 0; i < qualifiers_.size(); i++) { output.writeBytes(1, qualifiers_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -6891,19 +7199,13 @@ public final class FilterProtos { size += dataSize; size += 1 * getQualifiersList().size(); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -6916,12 +7218,10 @@ public final class FilterProtos { boolean result = true; result = result && getQualifiersList() .equals(other.getQualifiersList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -6933,7 +7233,7 @@ public final class FilterProtos { hash = (37 * hash) + QUALIFIERS_FIELD_NUMBER; hash = (53 * hash) + getQualifiersList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -6961,46 +7261,57 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - 
public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -7008,14 +7319,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.FirstKeyValueMatchingQualifiersFilter} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilterOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.FirstKeyValueMatchingQualifiersFilter) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -7028,18 +7340,15 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); qualifiers_ = java.util.Collections.emptyList(); @@ -7047,10 +7356,6 @@ public final class FilterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_descriptor; @@ -7080,6 +7385,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor 
field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter)other); @@ -7101,7 +7432,8 @@ public final class FilterProtos { } onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -7118,7 +7450,7 @@ public final class FilterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -7128,7 +7460,6 @@ public final class FilterProtos { } private int bitField0_; - // repeated bytes qualifiers = 1; private java.util.List qualifiers_ = java.util.Collections.emptyList(); private void ensureQualifiersIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { @@ -7186,7 +7517,8 @@ public final class FilterProtos { public Builder addAllQualifiers( java.lang.Iterable values) { ensureQualifiersIsMutable(); - super.addAll(values, qualifiers_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, qualifiers_); onChanged(); return this; } @@ -7199,22 +7531,59 @@ public final class FilterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return 
super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.FirstKeyValueMatchingQualifiersFilter) } + // @@protoc_insertion_point(class_scope:hbase.pb.FirstKeyValueMatchingQualifiersFilter) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter DEFAULT_INSTANCE; static { - defaultInstance = new FirstKeyValueMatchingQualifiersFilter(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public FirstKeyValueMatchingQualifiersFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FirstKeyValueMatchingQualifiersFilter(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.FirstKeyValueMatchingQualifiersFilter) } - public interface FuzzyRowFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface FuzzyRowFilterOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.FuzzyRowFilter) + com.google.protobuf.MessageOrBuilder { - // repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1; /** 
* repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1; */ @@ -7242,36 +7611,28 @@ public final class FilterProtos { /** * Protobuf type {@code hbase.pb.FuzzyRowFilter} */ - public static final class FuzzyRowFilter extends - com.google.protobuf.GeneratedMessage - implements FuzzyRowFilterOrBuilder { + public static final class FuzzyRowFilter extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.FuzzyRowFilter) + FuzzyRowFilterOrBuilder { // Use FuzzyRowFilter.newBuilder() to construct. - private FuzzyRowFilter(com.google.protobuf.GeneratedMessage.Builder builder) { + private FuzzyRowFilter(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private FuzzyRowFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final FuzzyRowFilter defaultInstance; - public static FuzzyRowFilter getDefaultInstance() { - return defaultInstance; } - - public FuzzyRowFilter getDefaultInstanceForType() { - return defaultInstance; + private FuzzyRowFilter() { + fuzzyKeysData_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private FuzzyRowFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -7295,7 +7656,8 @@ public final class FilterProtos { fuzzyKeysData_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } - 
fuzzyKeysData_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry)); + fuzzyKeysData_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry)); break; } } @@ -7304,7 +7666,7 @@ public final class FilterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { fuzzyKeysData_ = java.util.Collections.unmodifiableList(fuzzyKeysData_); @@ -7318,29 +7680,13 @@ public final class FilterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FuzzyRowFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FuzzyRowFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FuzzyRowFilter.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FuzzyRowFilter.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public FuzzyRowFilter parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new FuzzyRowFilter(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - // repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1; public static final int FUZZY_KEYS_DATA_FIELD_NUMBER 
= 1; private java.util.List fuzzyKeysData_; /** @@ -7376,13 +7722,11 @@ public final class FilterProtos { return fuzzyKeysData_.get(index); } - private void initFields() { - fuzzyKeysData_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; for (int i = 0; i < getFuzzyKeysDataCount(); i++) { if (!getFuzzyKeysData(i).isInitialized()) { @@ -7396,16 +7740,14 @@ public final class FilterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < fuzzyKeysData_.size(); i++) { output.writeMessage(1, fuzzyKeysData_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -7413,19 +7755,13 @@ public final class FilterProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, fuzzyKeysData_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -7438,12 +7774,10 @@ public final class FilterProtos { boolean result = true; result = result && getFuzzyKeysDataList() .equals(other.getFuzzyKeysDataList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && 
unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -7455,7 +7789,7 @@ public final class FilterProtos { hash = (37 * hash) + FUZZY_KEYS_DATA_FIELD_NUMBER; hash = (53 * hash) + getFuzzyKeysDataList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -7483,46 +7817,57 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FuzzyRowFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FuzzyRowFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FuzzyRowFilter prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -7530,14 +7875,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.FuzzyRowFilter} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FuzzyRowFilterOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.FuzzyRowFilter) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FuzzyRowFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FuzzyRowFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FuzzyRowFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -7550,19 +7896,16 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getFuzzyKeysDataFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { 
super.clear(); if (fuzzyKeysDataBuilder_ == null) { @@ -7574,10 +7917,6 @@ public final class FilterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FuzzyRowFilter_descriptor; @@ -7611,6 +7950,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FuzzyRowFilter) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FuzzyRowFilter)other); @@ -7641,21 +8006,21 @@ public final class FilterProtos { fuzzyKeysData_ = other.fuzzyKeysData_; bitField0_ = (bitField0_ & ~0x00000001); fuzzyKeysDataBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getFuzzyKeysDataFieldBuilder() : null; } else { fuzzyKeysDataBuilder_.addAllMessages(other.fuzzyKeysData_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getFuzzyKeysDataCount(); i++) { if (!getFuzzyKeysData(i).isInitialized()) { - return false; } } @@ -7671,7 +8036,7 @@ public final class FilterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FuzzyRowFilter) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -7681,7 +8046,6 @@ public final class FilterProtos { } private int bitField0_; - // repeated .hbase.pb.BytesBytesPair fuzzy_keys_data = 1; private java.util.List fuzzyKeysData_ = java.util.Collections.emptyList(); private void ensureFuzzyKeysDataIsMutable() { @@ -7691,7 +8055,7 @@ public final class FilterProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> fuzzyKeysDataBuilder_; /** @@ -7823,7 +8187,8 @@ public final class FilterProtos { java.lang.Iterable values) { if (fuzzyKeysDataBuilder_ == null) { ensureFuzzyKeysDataIsMutable(); - super.addAll(values, fuzzyKeysData_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, fuzzyKeysData_); onChanged(); } else { fuzzyKeysDataBuilder_.addAllMessages(values); @@ -7906,11 +8271,11 @@ public final class FilterProtos { getFuzzyKeysDataBuilderList() { return 
getFuzzyKeysDataFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getFuzzyKeysDataFieldBuilder() { if (fuzzyKeysDataBuilder_ == null) { - fuzzyKeysDataBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + fuzzyKeysDataBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>( fuzzyKeysData_, ((bitField0_ & 0x00000001) == 0x00000001), @@ -7920,22 +8285,59 @@ public final class FilterProtos { } return fuzzyKeysDataBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.FuzzyRowFilter) } + // @@protoc_insertion_point(class_scope:hbase.pb.FuzzyRowFilter) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FuzzyRowFilter DEFAULT_INSTANCE; static { - defaultInstance = new FuzzyRowFilter(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FuzzyRowFilter(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FuzzyRowFilter getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final 
com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public FuzzyRowFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FuzzyRowFilter(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FuzzyRowFilter getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.FuzzyRowFilter) } - public interface InclusiveStopFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface InclusiveStopFilterOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.InclusiveStopFilter) + com.google.protobuf.MessageOrBuilder { - // optional bytes stop_row_key = 1; /** * optional bytes stop_row_key = 1; */ @@ -7948,36 +8350,28 @@ public final class FilterProtos { /** * Protobuf type {@code hbase.pb.InclusiveStopFilter} */ - public static final class InclusiveStopFilter extends - com.google.protobuf.GeneratedMessage - implements InclusiveStopFilterOrBuilder { + public static final class InclusiveStopFilter extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.InclusiveStopFilter) + InclusiveStopFilterOrBuilder { // Use InclusiveStopFilter.newBuilder() to construct. 
- private InclusiveStopFilter(com.google.protobuf.GeneratedMessage.Builder builder) { + private InclusiveStopFilter(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private InclusiveStopFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final InclusiveStopFilter defaultInstance; - public static InclusiveStopFilter getDefaultInstance() { - return defaultInstance; } - - public InclusiveStopFilter getDefaultInstanceForType() { - return defaultInstance; + private InclusiveStopFilter() { + stopRowKey_ = com.google.protobuf.ByteString.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private InclusiveStopFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -8007,7 +8401,7 @@ public final class FilterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -8018,30 +8412,14 @@ public final class FilterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_InclusiveStopFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_InclusiveStopFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.InclusiveStopFilter.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.InclusiveStopFilter.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public InclusiveStopFilter parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new InclusiveStopFilter(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional bytes stop_row_key = 1; public static final int STOP_ROW_KEY_FIELD_NUMBER = 1; private com.google.protobuf.ByteString stopRowKey_; /** @@ -8057,13 +8435,11 @@ public final class FilterProtos { return stopRowKey_; } - private void initFields() { - stopRowKey_ = com.google.protobuf.ByteString.EMPTY; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -8071,16 +8447,14 @@ public final class FilterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, stopRowKey_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ 
-8088,19 +8462,13 @@ public final class FilterProtos { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, stopRowKey_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -8116,12 +8484,10 @@ public final class FilterProtos { result = result && getStopRowKey() .equals(other.getStopRowKey()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -8133,7 +8499,7 @@ public final class FilterProtos { hash = (37 * hash) + STOP_ROW_KEY_FIELD_NUMBER; hash = (53 * hash) + getStopRowKey().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -8161,46 +8527,57 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, 
input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.InclusiveStopFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.InclusiveStopFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.InclusiveStopFilter prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -8208,14 +8585,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.InclusiveStopFilter} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.InclusiveStopFilterOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.InclusiveStopFilter) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.InclusiveStopFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_InclusiveStopFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_InclusiveStopFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -8228,18 +8606,15 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { 
super.clear(); stopRowKey_ = com.google.protobuf.ByteString.EMPTY; @@ -8247,10 +8622,6 @@ public final class FilterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_InclusiveStopFilter_descriptor; @@ -8281,6 +8652,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.InclusiveStopFilter) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.InclusiveStopFilter)other); @@ -8295,7 +8692,8 @@ public final class FilterProtos { if (other.hasStopRowKey()) { setStopRowKey(other.getStopRowKey()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -8312,7 +8710,7 @@ public final class FilterProtos { parsedMessage = PARSER.parsePartialFrom(input, 
extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.InclusiveStopFilter) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -8322,7 +8720,6 @@ public final class FilterProtos { } private int bitField0_; - // optional bytes stop_row_key = 1; private com.google.protobuf.ByteString stopRowKey_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes stop_row_key = 1; @@ -8357,22 +8754,59 @@ public final class FilterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.InclusiveStopFilter) } + // @@protoc_insertion_point(class_scope:hbase.pb.InclusiveStopFilter) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.InclusiveStopFilter DEFAULT_INSTANCE; static { - defaultInstance = new InclusiveStopFilter(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.InclusiveStopFilter(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.InclusiveStopFilter getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public InclusiveStopFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new InclusiveStopFilter(input, 
extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.InclusiveStopFilter getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.InclusiveStopFilter) } - public interface KeyOnlyFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface KeyOnlyFilterOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.KeyOnlyFilter) + com.google.protobuf.MessageOrBuilder { - // required bool len_as_val = 1; /** * required bool len_as_val = 1; */ @@ -8385,36 +8819,28 @@ public final class FilterProtos { /** * Protobuf type {@code hbase.pb.KeyOnlyFilter} */ - public static final class KeyOnlyFilter extends - com.google.protobuf.GeneratedMessage - implements KeyOnlyFilterOrBuilder { + public static final class KeyOnlyFilter extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.KeyOnlyFilter) + KeyOnlyFilterOrBuilder { // Use KeyOnlyFilter.newBuilder() to construct. 
- private KeyOnlyFilter(com.google.protobuf.GeneratedMessage.Builder builder) { + private KeyOnlyFilter(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private KeyOnlyFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final KeyOnlyFilter defaultInstance; - public static KeyOnlyFilter getDefaultInstance() { - return defaultInstance; } - - public KeyOnlyFilter getDefaultInstanceForType() { - return defaultInstance; + private KeyOnlyFilter() { + lenAsVal_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private KeyOnlyFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -8444,7 +8870,7 @@ public final class FilterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -8455,30 +8881,14 @@ public final class FilterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_KeyOnlyFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_KeyOnlyFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.KeyOnlyFilter.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.KeyOnlyFilter.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public KeyOnlyFilter parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new KeyOnlyFilter(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bool len_as_val = 1; public static final int LEN_AS_VAL_FIELD_NUMBER = 1; private boolean lenAsVal_; /** @@ -8494,13 +8904,11 @@ public final class FilterProtos { return lenAsVal_; } - private void initFields() { - lenAsVal_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasLenAsVal()) { memoizedIsInitialized = 0; @@ -8512,16 +8920,14 @@ public final class FilterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, lenAsVal_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -8529,19 +8935,13 @@ public final class FilterProtos { size += 
com.google.protobuf.CodedOutputStream .computeBoolSize(1, lenAsVal_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -8557,12 +8957,10 @@ public final class FilterProtos { result = result && (getLenAsVal() == other.getLenAsVal()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -8572,9 +8970,10 @@ public final class FilterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasLenAsVal()) { hash = (37 * hash) + LEN_AS_VAL_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getLenAsVal()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getLenAsVal()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -8602,46 +9001,57 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + 
return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.KeyOnlyFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.KeyOnlyFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.KeyOnlyFilter prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return 
this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -8649,14 +9059,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.KeyOnlyFilter} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.KeyOnlyFilterOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.KeyOnlyFilter) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.KeyOnlyFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_KeyOnlyFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_KeyOnlyFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -8669,18 +9080,15 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); 
lenAsVal_ = false; @@ -8688,10 +9096,6 @@ public final class FilterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_KeyOnlyFilter_descriptor; @@ -8722,6 +9126,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.KeyOnlyFilter) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.KeyOnlyFilter)other); @@ -8736,13 +9166,13 @@ public final class FilterProtos { if (other.hasLenAsVal()) { setLenAsVal(other.getLenAsVal()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasLenAsVal()) { - return false; } return true; @@ -8757,7 +9187,7 @@ public final class FilterProtos { parsedMessage = 
PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.KeyOnlyFilter) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -8767,7 +9197,6 @@ public final class FilterProtos { } private int bitField0_; - // required bool len_as_val = 1; private boolean lenAsVal_ ; /** * required bool len_as_val = 1; @@ -8799,22 +9228,59 @@ public final class FilterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.KeyOnlyFilter) } + // @@protoc_insertion_point(class_scope:hbase.pb.KeyOnlyFilter) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.KeyOnlyFilter DEFAULT_INSTANCE; static { - defaultInstance = new KeyOnlyFilter(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.KeyOnlyFilter(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.KeyOnlyFilter getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public KeyOnlyFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new KeyOnlyFilter(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; 
+ } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.KeyOnlyFilter getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.KeyOnlyFilter) } - public interface MultipleColumnPrefixFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface MultipleColumnPrefixFilterOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.MultipleColumnPrefixFilter) + com.google.protobuf.MessageOrBuilder { - // repeated bytes sorted_prefixes = 1; /** * repeated bytes sorted_prefixes = 1; */ @@ -8831,36 +9297,28 @@ public final class FilterProtos { /** * Protobuf type {@code hbase.pb.MultipleColumnPrefixFilter} */ - public static final class MultipleColumnPrefixFilter extends - com.google.protobuf.GeneratedMessage - implements MultipleColumnPrefixFilterOrBuilder { + public static final class MultipleColumnPrefixFilter extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.MultipleColumnPrefixFilter) + MultipleColumnPrefixFilterOrBuilder { // Use MultipleColumnPrefixFilter.newBuilder() to construct. 
- private MultipleColumnPrefixFilter(com.google.protobuf.GeneratedMessage.Builder builder) { + private MultipleColumnPrefixFilter(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private MultipleColumnPrefixFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final MultipleColumnPrefixFilter defaultInstance; - public static MultipleColumnPrefixFilter getDefaultInstance() { - return defaultInstance; } - - public MultipleColumnPrefixFilter getDefaultInstanceForType() { - return defaultInstance; + private MultipleColumnPrefixFilter() { + sortedPrefixes_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private MultipleColumnPrefixFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -8893,7 +9351,7 @@ public final class FilterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { sortedPrefixes_ = java.util.Collections.unmodifiableList(sortedPrefixes_); @@ -8907,29 +9365,13 @@ public final class FilterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultipleColumnPrefixFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable 
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultipleColumnPrefixFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public MultipleColumnPrefixFilter parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new MultipleColumnPrefixFilter(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - // repeated bytes sorted_prefixes = 1; public static final int SORTED_PREFIXES_FIELD_NUMBER = 1; private java.util.List sortedPrefixes_; /** @@ -8952,13 +9394,11 @@ public final class FilterProtos { return sortedPrefixes_.get(index); } - private void initFields() { - sortedPrefixes_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -8966,16 +9406,14 @@ public final class FilterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < sortedPrefixes_.size(); i++) { output.writeBytes(1, sortedPrefixes_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = 
-1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -8988,19 +9426,13 @@ public final class FilterProtos { size += dataSize; size += 1 * getSortedPrefixesList().size(); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -9013,12 +9445,10 @@ public final class FilterProtos { boolean result = true; result = result && getSortedPrefixesList() .equals(other.getSortedPrefixesList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -9030,7 +9460,7 @@ public final class FilterProtos { hash = (37 * hash) + SORTED_PREFIXES_FIELD_NUMBER; hash = (53 * hash) + getSortedPrefixesList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -9058,46 +9488,57 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder 
newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -9105,14 +9546,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.MultipleColumnPrefixFilter} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultipleColumnPrefixFilterOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.MultipleColumnPrefixFilter) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultipleColumnPrefixFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultipleColumnPrefixFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultipleColumnPrefixFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -9125,18 +9567,15 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); 
maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); sortedPrefixes_ = java.util.Collections.emptyList(); @@ -9144,10 +9583,6 @@ public final class FilterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultipleColumnPrefixFilter_descriptor; @@ -9177,6 +9612,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter)other); @@ -9198,7 +9659,8 @@ public 
final class FilterProtos { } onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -9215,7 +9677,7 @@ public final class FilterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -9225,7 +9687,6 @@ public final class FilterProtos { } private int bitField0_; - // repeated bytes sorted_prefixes = 1; private java.util.List sortedPrefixes_ = java.util.Collections.emptyList(); private void ensureSortedPrefixesIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { @@ -9283,7 +9744,8 @@ public final class FilterProtos { public Builder addAllSortedPrefixes( java.lang.Iterable values) { ensureSortedPrefixesIsMutable(); - super.addAll(values, sortedPrefixes_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, sortedPrefixes_); onChanged(); return this; } @@ -9296,22 +9758,59 @@ public final class FilterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.MultipleColumnPrefixFilter) } + // @@protoc_insertion_point(class_scope:hbase.pb.MultipleColumnPrefixFilter) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter DEFAULT_INSTANCE; static { - defaultInstance = new MultipleColumnPrefixFilter(true); - 
defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter(); } - // @@protoc_insertion_point(class_scope:hbase.pb.MultipleColumnPrefixFilter) - } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter getDefaultInstance() { + return DEFAULT_INSTANCE; + } - public interface PageFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public MultipleColumnPrefixFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MultipleColumnPrefixFilter(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface PageFilterOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.PageFilter) + com.google.protobuf.MessageOrBuilder { - // required int64 page_size = 1; /** * required int64 page_size = 1; */ @@ -9324,36 +9823,28 @@ public final class FilterProtos { /** * Protobuf type {@code hbase.pb.PageFilter} */ - public static final class PageFilter extends - com.google.protobuf.GeneratedMessage - implements PageFilterOrBuilder { + public static final class PageFilter extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.PageFilter) + PageFilterOrBuilder { // Use PageFilter.newBuilder() to construct. 
- private PageFilter(com.google.protobuf.GeneratedMessage.Builder builder) { + private PageFilter(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private PageFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final PageFilter defaultInstance; - public static PageFilter getDefaultInstance() { - return defaultInstance; - } - - public PageFilter getDefaultInstanceForType() { - return defaultInstance; + private PageFilter() { + pageSize_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private PageFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -9383,7 +9874,7 @@ public final class FilterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -9394,30 +9885,14 @@ public final class FilterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_PageFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_PageFilter_fieldAccessorTable 
.ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PageFilter.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PageFilter.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public PageFilter parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new PageFilter(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required int64 page_size = 1; public static final int PAGE_SIZE_FIELD_NUMBER = 1; private long pageSize_; /** @@ -9433,13 +9908,11 @@ public final class FilterProtos { return pageSize_; } - private void initFields() { - pageSize_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasPageSize()) { memoizedIsInitialized = 0; @@ -9451,16 +9924,14 @@ public final class FilterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeInt64(1, pageSize_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -9468,19 +9939,13 @@ public final class FilterProtos { size += com.google.protobuf.CodedOutputStream .computeInt64Size(1, pageSize_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += 
unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -9496,12 +9961,10 @@ public final class FilterProtos { result = result && (getPageSize() == other.getPageSize()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -9511,9 +9974,10 @@ public final class FilterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasPageSize()) { hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getPageSize()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getPageSize()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -9541,46 +10005,57 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PageFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PageFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PageFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PageFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PageFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PageFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PageFilter prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -9588,14 +10063,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.PageFilter} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PageFilterOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.PageFilter) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PageFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_PageFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_PageFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -9608,18 +10084,15 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); pageSize_ = 0L; @@ -9627,10 +10100,6 @@ public 
final class FilterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_PageFilter_descriptor; @@ -9661,6 +10130,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PageFilter) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PageFilter)other); @@ -9675,13 +10170,13 @@ public final class FilterProtos { if (other.hasPageSize()) { setPageSize(other.getPageSize()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasPageSize()) { - return false; } return true; @@ -9696,7 +10191,7 @@ public final class FilterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PageFilter) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -9706,7 +10201,6 @@ public final class FilterProtos { } private int bitField0_; - // required int64 page_size = 1; private long pageSize_ ; /** * required int64 page_size = 1; @@ -9738,22 +10232,59 @@ public final class FilterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.PageFilter) } + // @@protoc_insertion_point(class_scope:hbase.pb.PageFilter) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PageFilter DEFAULT_INSTANCE; static { - defaultInstance = new PageFilter(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PageFilter(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PageFilter getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public PageFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new PageFilter(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + 
return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PageFilter getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.PageFilter) } - public interface PrefixFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface PrefixFilterOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.PrefixFilter) + com.google.protobuf.MessageOrBuilder { - // optional bytes prefix = 1; /** * optional bytes prefix = 1; */ @@ -9766,36 +10297,28 @@ public final class FilterProtos { /** * Protobuf type {@code hbase.pb.PrefixFilter} */ - public static final class PrefixFilter extends - com.google.protobuf.GeneratedMessage - implements PrefixFilterOrBuilder { + public static final class PrefixFilter extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.PrefixFilter) + PrefixFilterOrBuilder { // Use PrefixFilter.newBuilder() to construct. 
- private PrefixFilter(com.google.protobuf.GeneratedMessage.Builder builder) { + private PrefixFilter(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private PrefixFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final PrefixFilter defaultInstance; - public static PrefixFilter getDefaultInstance() { - return defaultInstance; } - - public PrefixFilter getDefaultInstanceForType() { - return defaultInstance; + private PrefixFilter() { + prefix_ = com.google.protobuf.ByteString.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private PrefixFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -9825,7 +10348,7 @@ public final class FilterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -9836,30 +10359,14 @@ public final class FilterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_PrefixFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_PrefixFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PrefixFilter.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PrefixFilter.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public PrefixFilter parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new PrefixFilter(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional bytes prefix = 1; public static final int PREFIX_FIELD_NUMBER = 1; private com.google.protobuf.ByteString prefix_; /** @@ -9875,13 +10382,11 @@ public final class FilterProtos { return prefix_; } - private void initFields() { - prefix_ = com.google.protobuf.ByteString.EMPTY; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -9889,16 +10394,14 @@ public final class FilterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, prefix_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -9906,19 +10409,13 @@ public final class FilterProtos { size += 
com.google.protobuf.CodedOutputStream .computeBytesSize(1, prefix_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -9934,12 +10431,10 @@ public final class FilterProtos { result = result && getPrefix() .equals(other.getPrefix()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -9951,7 +10446,7 @@ public final class FilterProtos { hash = (37 * hash) + PREFIX_FIELD_NUMBER; hash = (53 * hash) + getPrefix().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -9979,46 +10474,57 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PrefixFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PrefixFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PrefixFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PrefixFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PrefixFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PrefixFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PrefixFilter prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -10026,14 +10532,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.PrefixFilter} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PrefixFilterOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.PrefixFilter) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PrefixFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_PrefixFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_PrefixFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -10046,18 +10553,15 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); prefix_ = 
com.google.protobuf.ByteString.EMPTY; @@ -10065,10 +10569,6 @@ public final class FilterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_PrefixFilter_descriptor; @@ -10099,6 +10599,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PrefixFilter) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PrefixFilter)other); @@ -10113,7 +10639,8 @@ public final class FilterProtos { if (other.hasPrefix()) { setPrefix(other.getPrefix()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -10130,7 +10657,7 @@ public final class FilterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PrefixFilter) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -10140,7 +10667,6 @@ public final class FilterProtos { } private int bitField0_; - // optional bytes prefix = 1; private com.google.protobuf.ByteString prefix_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes prefix = 1; @@ -10175,22 +10701,59 @@ public final class FilterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.PrefixFilter) } + // @@protoc_insertion_point(class_scope:hbase.pb.PrefixFilter) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PrefixFilter DEFAULT_INSTANCE; static { - defaultInstance = new PrefixFilter(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PrefixFilter(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PrefixFilter getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public PrefixFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new PrefixFilter(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + 
@java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PrefixFilter getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.PrefixFilter) } - public interface QualifierFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface QualifierFilterOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.QualifierFilter) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.CompareFilter compare_filter = 1; /** * required .hbase.pb.CompareFilter compare_filter = 1; */ @@ -10207,36 +10770,27 @@ public final class FilterProtos { /** * Protobuf type {@code hbase.pb.QualifierFilter} */ - public static final class QualifierFilter extends - com.google.protobuf.GeneratedMessage - implements QualifierFilterOrBuilder { + public static final class QualifierFilter extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.QualifierFilter) + QualifierFilterOrBuilder { // Use QualifierFilter.newBuilder() to construct. 
- private QualifierFilter(com.google.protobuf.GeneratedMessage.Builder builder) { + private QualifierFilter(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private QualifierFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final QualifierFilter defaultInstance; - public static QualifierFilter getDefaultInstance() { - return defaultInstance; } - - public QualifierFilter getDefaultInstanceForType() { - return defaultInstance; + private QualifierFilter() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private QualifierFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -10274,7 +10828,7 @@ public final class FilterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -10285,30 +10839,14 @@ public final class FilterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_QualifierFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_QualifierFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.QualifierFilter.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.QualifierFilter.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public QualifierFilter parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new QualifierFilter(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.CompareFilter compare_filter = 1; public static final int COMPARE_FILTER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter compareFilter_; /** @@ -10321,22 +10859,20 @@ public final class FilterProtos { * required .hbase.pb.CompareFilter compare_filter = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { - return compareFilter_; + return compareFilter_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance() : compareFilter_; } /** * required .hbase.pb.CompareFilter compare_filter = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { - return compareFilter_; + return compareFilter_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance() : compareFilter_; } - private void initFields() { - compareFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasCompareFilter()) { memoizedIsInitialized = 0; @@ -10352,36 +10888,28 @@ public final class FilterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, compareFilter_); + output.writeMessage(1, getCompareFilter()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, compareFilter_); + .computeMessageSize(1, getCompareFilter()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -10397,12 +10925,10 @@ public final class FilterProtos { result = result && getCompareFilter() .equals(other.getCompareFilter()); } - result = result && - 
getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -10414,7 +10940,7 @@ public final class FilterProtos { hash = (37 * hash) + COMPARE_FILTER_FIELD_NUMBER; hash = (53 * hash) + getCompareFilter().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -10442,46 +10968,57 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.QualifierFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.QualifierFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.QualifierFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.QualifierFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } 
public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.QualifierFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.QualifierFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.QualifierFilter prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -10489,14 +11026,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.QualifierFilter} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.QualifierFilterOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.QualifierFilter) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.QualifierFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_QualifierFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_QualifierFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -10509,23 +11047,20 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getCompareFilterFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { 
super.clear(); if (compareFilterBuilder_ == null) { - compareFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + compareFilter_ = null; } else { compareFilterBuilder_.clear(); } @@ -10533,10 +11068,6 @@ public final class FilterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_QualifierFilter_descriptor; @@ -10571,6 +11102,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.QualifierFilter) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.QualifierFilter)other); @@ -10585,17 +11142,16 @@ public final class FilterProtos { if (other.hasCompareFilter()) { mergeCompareFilter(other.getCompareFilter()); } - this.mergeUnknownFields(other.getUnknownFields()); + 
this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasCompareFilter()) { - return false; } if (!getCompareFilter().isInitialized()) { - return false; } return true; @@ -10610,7 +11166,7 @@ public final class FilterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.QualifierFilter) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -10620,9 +11176,8 @@ public final class FilterProtos { } private int bitField0_; - // required .hbase.pb.CompareFilter compare_filter = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_; /** * required .hbase.pb.CompareFilter compare_filter = 1; @@ -10635,7 +11190,7 @@ public final class FilterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { if (compareFilterBuilder_ == null) { - return compareFilter_; + return compareFilter_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance() : compareFilter_; } else { return compareFilterBuilder_.getMessage(); } @@ -10676,6 +11231,7 @@ public final class FilterProtos { public Builder mergeCompareFilter(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter value) { if (compareFilterBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + compareFilter_ != null && compareFilter_ != org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) { compareFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial(); @@ -10694,7 +11250,7 @@ public final class FilterProtos { */ public Builder clearCompareFilter() { if (compareFilterBuilder_ == null) { - compareFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + compareFilter_ = null; onChanged(); } else { compareFilterBuilder_.clear(); @@ -10717,41 +11273,79 @@ public final class FilterProtos { if (compareFilterBuilder_ != null) { return compareFilterBuilder_.getMessageOrBuilder(); } else { - return compareFilter_; + return compareFilter_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance() : compareFilter_; } } /** * required .hbase.pb.CompareFilter compare_filter = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilterOrBuilder> getCompareFilterFieldBuilder() { if (compareFilterBuilder_ == null) { - compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilterOrBuilder>( - compareFilter_, + getCompareFilter(), getParentForChildren(), isClean()); compareFilter_ = null; } return compareFilterBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.QualifierFilter) } + // @@protoc_insertion_point(class_scope:hbase.pb.QualifierFilter) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.QualifierFilter DEFAULT_INSTANCE; static { - defaultInstance = new QualifierFilter(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.QualifierFilter(); + } + + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.QualifierFilter getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public QualifierFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new QualifierFilter(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.QualifierFilter getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.QualifierFilter) } - public interface RandomRowFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RandomRowFilterOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RandomRowFilter) + com.google.protobuf.MessageOrBuilder { - // required float chance = 1; /** * required float chance = 1; */ @@ -10764,36 +11358,28 @@ public final class FilterProtos { /** * Protobuf type {@code hbase.pb.RandomRowFilter} */ - public static final class RandomRowFilter extends - com.google.protobuf.GeneratedMessage - implements RandomRowFilterOrBuilder { + public static final class RandomRowFilter extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RandomRowFilter) + RandomRowFilterOrBuilder { // Use RandomRowFilter.newBuilder() to construct. 
- private RandomRowFilter(com.google.protobuf.GeneratedMessage.Builder builder) { + private RandomRowFilter(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private RandomRowFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RandomRowFilter defaultInstance; - public static RandomRowFilter getDefaultInstance() { - return defaultInstance; - } - - public RandomRowFilter getDefaultInstanceForType() { - return defaultInstance; + private RandomRowFilter() { + chance_ = 0F; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RandomRowFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -10823,7 +11409,7 @@ public final class FilterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -10834,30 +11420,14 @@ public final class FilterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_RandomRowFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_RandomRowFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RandomRowFilter.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RandomRowFilter.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RandomRowFilter parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RandomRowFilter(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required float chance = 1; public static final int CHANCE_FIELD_NUMBER = 1; private float chance_; /** @@ -10873,13 +11443,11 @@ public final class FilterProtos { return chance_; } - private void initFields() { - chance_ = 0F; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasChance()) { memoizedIsInitialized = 0; @@ -10891,16 +11459,14 @@ public final class FilterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeFloat(1, chance_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -10908,19 +11474,13 @@ public final class FilterProtos { size += 
com.google.protobuf.CodedOutputStream .computeFloatSize(1, chance_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -10933,14 +11493,15 @@ public final class FilterProtos { boolean result = true; result = result && (hasChance() == other.hasChance()); if (hasChance()) { - result = result && (Float.floatToIntBits(getChance()) == Float.floatToIntBits(other.getChance())); + result = result && ( + java.lang.Float.floatToIntBits(getChance()) + == java.lang.Float.floatToIntBits( + other.getChance())); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -10950,10 +11511,10 @@ public final class FilterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasChance()) { hash = (37 * hash) + CHANCE_FIELD_NUMBER; - hash = (53 * hash) + Float.floatToIntBits( + hash = (53 * hash) + java.lang.Float.floatToIntBits( getChance()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -10981,46 +11542,57 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RandomRowFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RandomRowFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RandomRowFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RandomRowFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RandomRowFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } 
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RandomRowFilter prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -11028,14 +11600,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.RandomRowFilter} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RandomRowFilterOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RandomRowFilter) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RandomRowFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_RandomRowFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_RandomRowFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -11048,18 +11621,15 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void 
maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); chance_ = 0F; @@ -11067,10 +11637,6 @@ public final class FilterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_RandomRowFilter_descriptor; @@ -11101,6 +11667,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RandomRowFilter) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RandomRowFilter)other); @@ -11115,13 +11707,13 @@ public final class FilterProtos { if (other.hasChance()) { setChance(other.getChance()); } - 
this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasChance()) { - return false; } return true; @@ -11136,7 +11728,7 @@ public final class FilterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RandomRowFilter) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -11146,7 +11738,6 @@ public final class FilterProtos { } private int bitField0_; - // required float chance = 1; private float chance_ ; /** * required float chance = 1; @@ -11178,22 +11769,59 @@ public final class FilterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RandomRowFilter) } + // @@protoc_insertion_point(class_scope:hbase.pb.RandomRowFilter) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RandomRowFilter DEFAULT_INSTANCE; static { - defaultInstance = new RandomRowFilter(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RandomRowFilter(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RandomRowFilter getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RandomRowFilter parsePartialFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RandomRowFilter(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RandomRowFilter getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.RandomRowFilter) } - public interface RowFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RowFilterOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RowFilter) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.CompareFilter compare_filter = 1; /** * required .hbase.pb.CompareFilter compare_filter = 1; */ @@ -11210,36 +11838,27 @@ public final class FilterProtos { /** * Protobuf type {@code hbase.pb.RowFilter} */ - public static final class RowFilter extends - com.google.protobuf.GeneratedMessage - implements RowFilterOrBuilder { + public static final class RowFilter extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RowFilter) + RowFilterOrBuilder { // Use RowFilter.newBuilder() to construct. 
- private RowFilter(com.google.protobuf.GeneratedMessage.Builder builder) { + private RowFilter(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private RowFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RowFilter defaultInstance; - public static RowFilter getDefaultInstance() { - return defaultInstance; } - - public RowFilter getDefaultInstanceForType() { - return defaultInstance; + private RowFilter() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RowFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -11277,7 +11896,7 @@ public final class FilterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -11288,30 +11907,14 @@ public final class FilterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowFilter.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowFilter.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RowFilter parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RowFilter(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.CompareFilter compare_filter = 1; public static final int COMPARE_FILTER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter compareFilter_; /** @@ -11324,22 +11927,20 @@ public final class FilterProtos { * required .hbase.pb.CompareFilter compare_filter = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { - return compareFilter_; + return compareFilter_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance() : compareFilter_; } /** * required .hbase.pb.CompareFilter compare_filter = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { - return compareFilter_; + return compareFilter_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance() : compareFilter_; } - private void initFields() { - compareFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasCompareFilter()) { memoizedIsInitialized = 0; @@ -11355,36 +11956,28 @@ public final class FilterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, compareFilter_); + output.writeMessage(1, getCompareFilter()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, compareFilter_); + .computeMessageSize(1, getCompareFilter()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -11400,12 +11993,10 @@ public final class FilterProtos { result = result && getCompareFilter() .equals(other.getCompareFilter()); } - result = result && - 
getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -11417,7 +12008,7 @@ public final class FilterProtos { hash = (37 * hash) + COMPARE_FILTER_FIELD_NUMBER; hash = (53 * hash) + getCompareFilter().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -11445,46 +12036,57 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowFilter prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -11492,14 +12094,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.RowFilter} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowFilterOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RowFilter) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -11512,23 +12115,20 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getCompareFilterFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if 
(compareFilterBuilder_ == null) { - compareFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + compareFilter_ = null; } else { compareFilterBuilder_.clear(); } @@ -11536,10 +12136,6 @@ public final class FilterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowFilter_descriptor; @@ -11574,6 +12170,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowFilter) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowFilter)other); @@ -11588,17 +12210,16 @@ public final class FilterProtos { if (other.hasCompareFilter()) { mergeCompareFilter(other.getCompareFilter()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + 
onChanged(); return this; } public final boolean isInitialized() { if (!hasCompareFilter()) { - return false; } if (!getCompareFilter().isInitialized()) { - return false; } return true; @@ -11613,7 +12234,7 @@ public final class FilterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowFilter) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -11623,9 +12244,8 @@ public final class FilterProtos { } private int bitField0_; - // required .hbase.pb.CompareFilter compare_filter = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_; /** * required .hbase.pb.CompareFilter compare_filter = 1; @@ -11638,7 +12258,7 @@ public final class FilterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { if (compareFilterBuilder_ == null) { - return compareFilter_; + return compareFilter_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance() : compareFilter_; } else { return compareFilterBuilder_.getMessage(); } @@ -11679,6 +12299,7 @@ public final class FilterProtos { public Builder mergeCompareFilter(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter value) { if (compareFilterBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + compareFilter_ != null && compareFilter_ != org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) { compareFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial(); @@ -11697,7 +12318,7 @@ public final class FilterProtos { */ public Builder clearCompareFilter() { if (compareFilterBuilder_ == null) { - compareFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + compareFilter_ = null; onChanged(); } else { compareFilterBuilder_.clear(); @@ -11720,41 +12341,79 @@ public final class FilterProtos { if (compareFilterBuilder_ != null) { return compareFilterBuilder_.getMessageOrBuilder(); } else { - return compareFilter_; + return compareFilter_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance() : compareFilter_; } } /** * required .hbase.pb.CompareFilter compare_filter = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilterOrBuilder> getCompareFilterFieldBuilder() { if (compareFilterBuilder_ == null) { - compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilterOrBuilder>( - compareFilter_, + getCompareFilter(), getParentForChildren(), isClean()); compareFilter_ = null; } return compareFilterBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RowFilter) } + // @@protoc_insertion_point(class_scope:hbase.pb.RowFilter) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowFilter DEFAULT_INSTANCE; static { - defaultInstance = new RowFilter(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowFilter(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowFilter getDefaultInstance() { + return 
DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RowFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RowFilter(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowFilter getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.RowFilter) } - public interface SingleColumnValueExcludeFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface SingleColumnValueExcludeFilterOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SingleColumnValueExcludeFilter) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1; /** * required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1; */ @@ -11771,36 +12430,27 @@ public final class FilterProtos { /** * Protobuf type {@code hbase.pb.SingleColumnValueExcludeFilter} */ - public static final class SingleColumnValueExcludeFilter extends - com.google.protobuf.GeneratedMessage - implements SingleColumnValueExcludeFilterOrBuilder { + public static final class SingleColumnValueExcludeFilter extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.SingleColumnValueExcludeFilter) + SingleColumnValueExcludeFilterOrBuilder { // Use SingleColumnValueExcludeFilter.newBuilder() to construct. 
- private SingleColumnValueExcludeFilter(com.google.protobuf.GeneratedMessage.Builder builder) { + private SingleColumnValueExcludeFilter(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private SingleColumnValueExcludeFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SingleColumnValueExcludeFilter defaultInstance; - public static SingleColumnValueExcludeFilter getDefaultInstance() { - return defaultInstance; } - - public SingleColumnValueExcludeFilter getDefaultInstanceForType() { - return defaultInstance; + private SingleColumnValueExcludeFilter() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private SingleColumnValueExcludeFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -11838,7 +12488,7 @@ public final class FilterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -11849,30 +12499,14 @@ public final class FilterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueExcludeFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueExcludeFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SingleColumnValueExcludeFilter parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SingleColumnValueExcludeFilter(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1; public static final int SINGLE_COLUMN_VALUE_FILTER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter singleColumnValueFilter_; /** @@ -11885,22 +12519,20 @@ public final class FilterProtos { * required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter getSingleColumnValueFilter() { - return singleColumnValueFilter_; + return singleColumnValueFilter_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance() : singleColumnValueFilter_; } /** * required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder getSingleColumnValueFilterOrBuilder() { - return singleColumnValueFilter_; + return singleColumnValueFilter_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance() : singleColumnValueFilter_; } - private void initFields() { - singleColumnValueFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasSingleColumnValueFilter()) { memoizedIsInitialized = 0; @@ -11916,36 +12548,28 @@ public final class FilterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, singleColumnValueFilter_); + output.writeMessage(1, getSingleColumnValueFilter()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, singleColumnValueFilter_); + .computeMessageSize(1, getSingleColumnValueFilter()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + 
memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -11961,12 +12585,10 @@ public final class FilterProtos { result = result && getSingleColumnValueFilter() .equals(other.getSingleColumnValueFilter()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -11978,7 +12600,7 @@ public final class FilterProtos { hash = (37 * hash) + SINGLE_COLUMN_VALUE_FILTER_FIELD_NUMBER; hash = (53 * hash) + getSingleColumnValueFilter().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -12006,46 +12628,57 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseDelimitedFrom(java.io.InputStream input) throws 
java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -12053,14 +12686,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.SingleColumnValueExcludeFilter} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilterOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SingleColumnValueExcludeFilter) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueExcludeFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueExcludeFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -12073,23 +12707,20 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { 
getSingleColumnValueFilterFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (singleColumnValueFilterBuilder_ == null) { - singleColumnValueFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance(); + singleColumnValueFilter_ = null; } else { singleColumnValueFilterBuilder_.clear(); } @@ -12097,10 +12728,6 @@ public final class FilterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueExcludeFilter_descriptor; @@ -12135,6 +12762,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter) { return 
mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter)other); @@ -12149,17 +12802,16 @@ public final class FilterProtos { if (other.hasSingleColumnValueFilter()) { mergeSingleColumnValueFilter(other.getSingleColumnValueFilter()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasSingleColumnValueFilter()) { - return false; } if (!getSingleColumnValueFilter().isInitialized()) { - return false; } return true; @@ -12174,7 +12826,7 @@ public final class FilterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -12184,9 +12836,8 @@ public final class FilterProtos { } private int bitField0_; - // required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter singleColumnValueFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter singleColumnValueFilter_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder> singleColumnValueFilterBuilder_; /** * required 
.hbase.pb.SingleColumnValueFilter single_column_value_filter = 1; @@ -12199,7 +12850,7 @@ public final class FilterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter getSingleColumnValueFilter() { if (singleColumnValueFilterBuilder_ == null) { - return singleColumnValueFilter_; + return singleColumnValueFilter_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance() : singleColumnValueFilter_; } else { return singleColumnValueFilterBuilder_.getMessage(); } @@ -12240,6 +12891,7 @@ public final class FilterProtos { public Builder mergeSingleColumnValueFilter(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter value) { if (singleColumnValueFilterBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + singleColumnValueFilter_ != null && singleColumnValueFilter_ != org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance()) { singleColumnValueFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter.newBuilder(singleColumnValueFilter_).mergeFrom(value).buildPartial(); @@ -12258,7 +12910,7 @@ public final class FilterProtos { */ public Builder clearSingleColumnValueFilter() { if (singleColumnValueFilterBuilder_ == null) { - singleColumnValueFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance(); + singleColumnValueFilter_ = null; onChanged(); } else { singleColumnValueFilterBuilder_.clear(); @@ -12281,41 +12933,79 @@ public final class FilterProtos { if (singleColumnValueFilterBuilder_ != null) { return singleColumnValueFilterBuilder_.getMessageOrBuilder(); } else { - return singleColumnValueFilter_; + return singleColumnValueFilter_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance() : singleColumnValueFilter_; } } /** * required .hbase.pb.SingleColumnValueFilter single_column_value_filter = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder> getSingleColumnValueFilterFieldBuilder() { if (singleColumnValueFilterBuilder_ == null) { - singleColumnValueFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + singleColumnValueFilterBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder>( - singleColumnValueFilter_, + getSingleColumnValueFilter(), getParentForChildren(), isClean()); singleColumnValueFilter_ = null; } return singleColumnValueFilterBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.SingleColumnValueExcludeFilter) } + // @@protoc_insertion_point(class_scope:hbase.pb.SingleColumnValueExcludeFilter) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter DEFAULT_INSTANCE; static { - defaultInstance = new 
SingleColumnValueExcludeFilter(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public SingleColumnValueExcludeFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SingleColumnValueExcludeFilter(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.SingleColumnValueExcludeFilter) } - public interface SingleColumnValueFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface SingleColumnValueFilterOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SingleColumnValueFilter) + com.google.protobuf.MessageOrBuilder { - // optional bytes column_family = 1; /** * optional bytes column_family = 1; */ @@ -12325,7 +13015,6 @@ public final class FilterProtos { */ com.google.protobuf.ByteString getColumnFamily(); - // optional bytes column_qualifier = 2; /** * optional bytes column_qualifier = 2; */ @@ -12335,7 +13024,6 @@ public final class FilterProtos { */ com.google.protobuf.ByteString getColumnQualifier(); - // required .hbase.pb.CompareType compare_op = 3; /** * required .hbase.pb.CompareType 
compare_op = 3; */ @@ -12345,7 +13033,6 @@ public final class FilterProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType getCompareOp(); - // required .hbase.pb.Comparator comparator = 4; /** * required .hbase.pb.Comparator comparator = 4; */ @@ -12359,7 +13046,6 @@ public final class FilterProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder(); - // optional bool filter_if_missing = 5; /** * optional bool filter_if_missing = 5; */ @@ -12369,7 +13055,6 @@ public final class FilterProtos { */ boolean getFilterIfMissing(); - // optional bool latest_version_only = 6; /** * optional bool latest_version_only = 6; */ @@ -12382,36 +13067,32 @@ public final class FilterProtos { /** * Protobuf type {@code hbase.pb.SingleColumnValueFilter} */ - public static final class SingleColumnValueFilter extends - com.google.protobuf.GeneratedMessage - implements SingleColumnValueFilterOrBuilder { + public static final class SingleColumnValueFilter extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.SingleColumnValueFilter) + SingleColumnValueFilterOrBuilder { // Use SingleColumnValueFilter.newBuilder() to construct. 
- private SingleColumnValueFilter(com.google.protobuf.GeneratedMessage.Builder builder) { + private SingleColumnValueFilter(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private SingleColumnValueFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SingleColumnValueFilter defaultInstance; - public static SingleColumnValueFilter getDefaultInstance() { - return defaultInstance; - } - - public SingleColumnValueFilter getDefaultInstanceForType() { - return defaultInstance; + private SingleColumnValueFilter() { + columnFamily_ = com.google.protobuf.ByteString.EMPTY; + columnQualifier_ = com.google.protobuf.ByteString.EMPTY; + compareOp_ = 0; + filterIfMissing_ = false; + latestVersionOnly_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private SingleColumnValueFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -12447,7 +13128,7 @@ public final class FilterProtos { unknownFields.mergeVarintField(3, rawValue); } else { bitField0_ |= 0x00000004; - compareOp_ = value; + compareOp_ = rawValue; } break; } @@ -12480,7 +13161,7 @@ public final class FilterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -12491,30 
+13172,14 @@ public final class FilterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SingleColumnValueFilter parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SingleColumnValueFilter(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional bytes column_family = 1; public static final int COLUMN_FAMILY_FIELD_NUMBER = 1; private com.google.protobuf.ByteString columnFamily_; /** @@ -12530,7 +13195,6 @@ public final class FilterProtos { return columnFamily_; } - // optional bytes column_qualifier = 2; public static final int COLUMN_QUALIFIER_FIELD_NUMBER = 2; private com.google.protobuf.ByteString columnQualifier_; /** @@ -12546,9 +13210,8 @@ public final class FilterProtos { return columnQualifier_; } - // required .hbase.pb.CompareType compare_op = 3; public static final int COMPARE_OP_FIELD_NUMBER = 3; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType compareOp_; + private int compareOp_; /** * required .hbase.pb.CompareType compare_op 
= 3; */ @@ -12559,10 +13222,10 @@ public final class FilterProtos { * required .hbase.pb.CompareType compare_op = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType getCompareOp() { - return compareOp_; + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType result = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType.valueOf(compareOp_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType.LESS : result; } - // required .hbase.pb.Comparator comparator = 4; public static final int COMPARATOR_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator comparator_; /** @@ -12575,16 +13238,15 @@ public final class FilterProtos { * required .hbase.pb.Comparator comparator = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator getComparator() { - return comparator_; + return comparator_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance() : comparator_; } /** * required .hbase.pb.Comparator comparator = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() { - return comparator_; + return comparator_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance() : comparator_; } - // optional bool filter_if_missing = 5; public static final int FILTER_IF_MISSING_FIELD_NUMBER = 5; private boolean filterIfMissing_; /** @@ -12600,7 +13262,6 @@ public final class FilterProtos { return filterIfMissing_; } - // optional bool latest_version_only = 6; public static final int LATEST_VERSION_ONLY_FIELD_NUMBER = 6; private boolean latestVersionOnly_; /** @@ -12616,18 +13277,11 @@ public final class FilterProtos { return latestVersionOnly_; } - private void initFields() { - columnFamily_ = com.google.protobuf.ByteString.EMPTY; - columnQualifier_ = com.google.protobuf.ByteString.EMPTY; - compareOp_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType.LESS; - comparator_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); - filterIfMissing_ = false; - latestVersionOnly_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasCompareOp()) { memoizedIsInitialized = 0; @@ -12647,7 +13301,6 @@ public final class FilterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, columnFamily_); } @@ -12655,10 +13308,10 @@ public final class FilterProtos { output.writeBytes(2, columnQualifier_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeEnum(3, compareOp_.getNumber()); + output.writeEnum(3, compareOp_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeMessage(4, comparator_); + output.writeMessage(4, getComparator()); } if (((bitField0_ & 0x00000010) == 0x00000010)) { 
output.writeBool(5, filterIfMissing_); @@ -12666,12 +13319,11 @@ public final class FilterProtos { if (((bitField0_ & 0x00000020) == 0x00000020)) { output.writeBool(6, latestVersionOnly_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -12685,11 +13337,11 @@ public final class FilterProtos { } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(3, compareOp_.getNumber()); + .computeEnumSize(3, compareOp_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(4, comparator_); + .computeMessageSize(4, getComparator()); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream @@ -12699,19 +13351,13 @@ public final class FilterProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(6, latestVersionOnly_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -12734,8 +13380,7 @@ public final class FilterProtos { } result = result && (hasCompareOp() == other.hasCompareOp()); if (hasCompareOp()) { - result = result && - (getCompareOp() == other.getCompareOp()); + result = result && compareOp_ == other.compareOp_; } result = result && (hasComparator() == other.hasComparator()); if (hasComparator()) { @@ -12752,12 +13397,10 @@ public final class FilterProtos { result = result && 
(getLatestVersionOnly() == other.getLatestVersionOnly()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -12775,7 +13418,7 @@ public final class FilterProtos { } if (hasCompareOp()) { hash = (37 * hash) + COMPARE_OP_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getCompareOp()); + hash = (53 * hash) + compareOp_; } if (hasComparator()) { hash = (37 * hash) + COMPARATOR_FIELD_NUMBER; @@ -12783,13 +13426,15 @@ public final class FilterProtos { } if (hasFilterIfMissing()) { hash = (37 * hash) + FILTER_IF_MISSING_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getFilterIfMissing()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getFilterIfMissing()); } if (hasLatestVersionOnly()) { hash = (37 * hash) + LATEST_VERSION_ONLY_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getLatestVersionOnly()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getLatestVersionOnly()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -12817,46 +13462,57 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, 
input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -12864,14 +13520,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.SingleColumnValueFilter} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SingleColumnValueFilter) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -12884,29 +13541,26 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getComparatorFieldBuilder(); } } - private static Builder create() { - return 
new Builder(); - } - public Builder clear() { super.clear(); columnFamily_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); columnQualifier_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); - compareOp_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType.LESS; + compareOp_ = 0; bitField0_ = (bitField0_ & ~0x00000004); if (comparatorBuilder_ == null) { - comparator_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); + comparator_ = null; } else { comparatorBuilder_.clear(); } @@ -12918,10 +13572,6 @@ public final class FilterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_SingleColumnValueFilter_descriptor; @@ -12976,6 +13626,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter)other); @@ -13005,21 +13681,19 @@ public final class FilterProtos { if (other.hasLatestVersionOnly()) { setLatestVersionOnly(other.getLatestVersionOnly()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasCompareOp()) { - return false; } if (!hasComparator()) { - return false; } if (!getComparator().isInitialized()) { - return false; } return true; @@ -13034,7 +13708,7 @@ public final class FilterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -13044,7 +13718,6 @@ public final class FilterProtos { } private int bitField0_; - // optional bytes column_family = 1; private com.google.protobuf.ByteString columnFamily_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes column_family = 1; @@ -13080,7 +13753,6 @@ public final class FilterProtos { return this; } - // optional bytes column_qualifier = 2; private com.google.protobuf.ByteString columnQualifier_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes column_qualifier = 2; @@ -13116,8 +13788,7 @@ public final class FilterProtos { return this; } - // required .hbase.pb.CompareType compare_op = 3; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType compareOp_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType.LESS; + private int compareOp_ = 0; /** * required .hbase.pb.CompareType compare_op = 
3; */ @@ -13128,7 +13799,8 @@ public final class FilterProtos { * required .hbase.pb.CompareType compare_op = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType getCompareOp() { - return compareOp_; + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType result = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType.valueOf(compareOp_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType.LESS : result; } /** * required .hbase.pb.CompareType compare_op = 3; @@ -13138,7 +13810,7 @@ public final class FilterProtos { throw new NullPointerException(); } bitField0_ |= 0x00000004; - compareOp_ = value; + compareOp_ = value.getNumber(); onChanged(); return this; } @@ -13147,14 +13819,13 @@ public final class FilterProtos { */ public Builder clearCompareOp() { bitField0_ = (bitField0_ & ~0x00000004); - compareOp_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType.LESS; + compareOp_ = 0; onChanged(); return this; } - // required .hbase.pb.Comparator comparator = 4; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator comparator_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator comparator_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> comparatorBuilder_; /** * required .hbase.pb.Comparator comparator = 4; @@ -13167,7 +13838,7 @@ public final class FilterProtos { */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator getComparator() { if (comparatorBuilder_ == null) { - return comparator_; + return comparator_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance() : comparator_; } else { return comparatorBuilder_.getMessage(); } @@ -13208,6 +13879,7 @@ public final class FilterProtos { public Builder mergeComparator(org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator value) { if (comparatorBuilder_ == null) { if (((bitField0_ & 0x00000008) == 0x00000008) && + comparator_ != null && comparator_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance()) { comparator_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.newBuilder(comparator_).mergeFrom(value).buildPartial(); @@ -13226,7 +13898,7 @@ public final class FilterProtos { */ public Builder clearComparator() { if (comparatorBuilder_ == null) { - comparator_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); + comparator_ = null; onChanged(); } else { comparatorBuilder_.clear(); @@ -13249,19 +13921,20 @@ public final class FilterProtos { if (comparatorBuilder_ != null) { return comparatorBuilder_.getMessageOrBuilder(); } else { - return comparator_; + return comparator_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance() : comparator_; } } /** * required .hbase.pb.Comparator comparator = 4; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> getComparatorFieldBuilder() { if (comparatorBuilder_ == null) { - comparatorBuilder_ = new com.google.protobuf.SingleFieldBuilder< + comparatorBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>( - comparator_, + getComparator(), getParentForChildren(), isClean()); comparator_ = null; @@ -13269,7 +13942,6 @@ public final class FilterProtos { return comparatorBuilder_; } - // optional bool filter_if_missing = 5; private boolean filterIfMissing_ ; /** * optional bool filter_if_missing = 5; @@ -13302,7 +13974,6 @@ public final class FilterProtos { return this; } - // optional bool latest_version_only = 6; private boolean latestVersionOnly_ ; /** * optional bool latest_version_only = 6; @@ -13334,22 +14005,59 @@ public final class FilterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.SingleColumnValueFilter) } + // 
@@protoc_insertion_point(class_scope:hbase.pb.SingleColumnValueFilter) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter DEFAULT_INSTANCE; static { - defaultInstance = new SingleColumnValueFilter(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public SingleColumnValueFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SingleColumnValueFilter(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.SingleColumnValueFilter) } - public interface SkipFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface SkipFilterOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SkipFilter) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.Filter filter = 1; /** * required .hbase.pb.Filter filter = 1; */ @@ -13366,36 +14074,27 @@ public final class FilterProtos { /** * Protobuf type {@code hbase.pb.SkipFilter} */ - public static final class SkipFilter extends - com.google.protobuf.GeneratedMessage - implements SkipFilterOrBuilder { + public static final 
class SkipFilter extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.SkipFilter) + SkipFilterOrBuilder { // Use SkipFilter.newBuilder() to construct. - private SkipFilter(com.google.protobuf.GeneratedMessage.Builder builder) { + private SkipFilter(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private SkipFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SkipFilter defaultInstance; - public static SkipFilter getDefaultInstance() { - return defaultInstance; } - - public SkipFilter getDefaultInstanceForType() { - return defaultInstance; + private SkipFilter() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private SkipFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -13433,7 +14132,7 @@ public final class FilterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -13444,30 +14143,14 @@ public final class FilterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_SkipFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_SkipFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SkipFilter.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SkipFilter.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SkipFilter parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SkipFilter(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.Filter filter = 1; public static final int FILTER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter filter_; /** @@ -13480,22 +14163,20 @@ public final class FilterProtos { * required .hbase.pb.Filter filter = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter getFilter() { - return filter_; + return filter_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance() : filter_; } /** * required .hbase.pb.Filter filter = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() { - return filter_; + return filter_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance() : filter_; } - private void initFields() { - filter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasFilter()) { memoizedIsInitialized = 0; @@ -13511,36 +14192,28 @@ public final class FilterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, filter_); + output.writeMessage(1, getFilter()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, filter_); + .computeMessageSize(1, getFilter()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -13556,12 +14229,10 @@ public final class FilterProtos { result = result && getFilter() .equals(other.getFilter()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); 
return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -13573,7 +14244,7 @@ public final class FilterProtos { hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -13601,46 +14272,57 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SkipFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SkipFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SkipFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SkipFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SkipFilter parseFrom( com.google.protobuf.CodedInputStream input) throws 
java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SkipFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SkipFilter prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -13648,14 +14330,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.SkipFilter} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SkipFilterOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SkipFilter) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SkipFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_SkipFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_SkipFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -13668,23 +14351,20 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getFilterFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (filterBuilder_ == 
null) { - filter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); + filter_ = null; } else { filterBuilder_.clear(); } @@ -13692,10 +14372,6 @@ public final class FilterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_SkipFilter_descriptor; @@ -13730,6 +14406,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SkipFilter) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SkipFilter)other); @@ -13744,17 +14446,16 @@ public final class FilterProtos { if (other.hasFilter()) { mergeFilter(other.getFilter()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if 
(!hasFilter()) { - return false; } if (!getFilter().isInitialized()) { - return false; } return true; @@ -13769,7 +14470,7 @@ public final class FilterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SkipFilter) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -13779,9 +14480,8 @@ public final class FilterProtos { } private int bitField0_; - // required .hbase.pb.Filter filter = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter filter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter filter_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder> filterBuilder_; /** * required .hbase.pb.Filter filter = 1; @@ -13794,7 +14494,7 @@ public final class FilterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter getFilter() { if (filterBuilder_ == null) { - return filter_; + return filter_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance() : filter_; } else { return filterBuilder_.getMessage(); } @@ -13835,6 +14535,7 @@ public final class FilterProtos { public Builder mergeFilter(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter value) { if (filterBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + filter_ != null && filter_ != org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) { filter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial(); @@ -13853,7 +14554,7 @@ public final class FilterProtos { */ public Builder clearFilter() { if (filterBuilder_ == null) { - filter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); + filter_ = null; onChanged(); } else { filterBuilder_.clear(); @@ -13876,41 +14577,79 @@ public final class FilterProtos { if (filterBuilder_ != null) { return filterBuilder_.getMessageOrBuilder(); } else { - return filter_; + return filter_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance() : filter_; } } /** * required .hbase.pb.Filter filter = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder> getFilterFieldBuilder() { if (filterBuilder_ == null) { - filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + filterBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder>( - filter_, + getFilter(), getParentForChildren(), isClean()); filter_ = null; } return filterBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.SkipFilter) } + // @@protoc_insertion_point(class_scope:hbase.pb.SkipFilter) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SkipFilter DEFAULT_INSTANCE; static { - defaultInstance = new SkipFilter(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SkipFilter(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SkipFilter getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new 
com.google.protobuf.AbstractParser() { + public SkipFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SkipFilter(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SkipFilter getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.SkipFilter) } - public interface TimestampsFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface TimestampsFilterOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.TimestampsFilter) + com.google.protobuf.MessageOrBuilder { - // repeated int64 timestamps = 1 [packed = true]; /** * repeated int64 timestamps = 1 [packed = true]; */ @@ -13924,7 +14663,6 @@ public final class FilterProtos { */ long getTimestamps(int index); - // optional bool can_hint = 2; /** * optional bool can_hint = 2; */ @@ -13937,36 +14675,29 @@ public final class FilterProtos { /** * Protobuf type {@code hbase.pb.TimestampsFilter} */ - public static final class TimestampsFilter extends - com.google.protobuf.GeneratedMessage - implements TimestampsFilterOrBuilder { + public static final class TimestampsFilter extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.TimestampsFilter) + TimestampsFilterOrBuilder { // Use TimestampsFilter.newBuilder() to construct. 
- private TimestampsFilter(com.google.protobuf.GeneratedMessage.Builder builder) { + private TimestampsFilter(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private TimestampsFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final TimestampsFilter defaultInstance; - public static TimestampsFilter getDefaultInstance() { - return defaultInstance; - } - - public TimestampsFilter getDefaultInstanceForType() { - return defaultInstance; + private TimestampsFilter() { + timestamps_ = java.util.Collections.emptyList(); + canHint_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private TimestampsFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -14017,7 +14748,7 @@ public final class FilterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { timestamps_ = java.util.Collections.unmodifiableList(timestamps_); @@ -14031,30 +14762,14 @@ public final class FilterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_TimestampsFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_TimestampsFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.TimestampsFilter.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.TimestampsFilter.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public TimestampsFilter parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new TimestampsFilter(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // repeated int64 timestamps = 1 [packed = true]; public static final int TIMESTAMPS_FIELD_NUMBER = 1; private java.util.List timestamps_; /** @@ -14078,7 +14793,6 @@ public final class FilterProtos { } private int timestampsMemoizedSerializedSize = -1; - // optional bool can_hint = 2; public static final int CAN_HINT_FIELD_NUMBER = 2; private boolean canHint_; /** @@ -14094,14 +14808,11 @@ public final class FilterProtos { return canHint_; } - private void initFields() { - timestamps_ = java.util.Collections.emptyList(); - canHint_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -14111,8 +14822,8 @@ public final class FilterProtos { throws java.io.IOException { getSerializedSize(); if (getTimestampsList().size() > 0) { - output.writeRawVarint32(10); - output.writeRawVarint32(timestampsMemoizedSerializedSize); + 
output.writeUInt32NoTag(10); + output.writeUInt32NoTag(timestampsMemoizedSerializedSize); } for (int i = 0; i < timestamps_.size(); i++) { output.writeInt64NoTag(timestamps_.get(i)); @@ -14120,12 +14831,11 @@ public final class FilterProtos { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(2, canHint_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -14147,19 +14857,13 @@ public final class FilterProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(2, canHint_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -14177,12 +14881,10 @@ public final class FilterProtos { result = result && (getCanHint() == other.getCanHint()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -14196,9 +14898,10 @@ public final class FilterProtos { } if (hasCanHint()) { hash = (37 * hash) + CAN_HINT_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getCanHint()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getCanHint()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -14226,46 +14929,57 @@ public 
final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.TimestampsFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.TimestampsFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.TimestampsFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.TimestampsFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.TimestampsFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, 
extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.TimestampsFilter prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -14273,14 +14987,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.TimestampsFilter} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.TimestampsFilterOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.TimestampsFilter) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.TimestampsFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_TimestampsFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_TimestampsFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -14293,18 +15008,15 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); timestamps_ = java.util.Collections.emptyList(); @@ -14314,10 +15026,6 @@ public final class FilterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_TimestampsFilter_descriptor; @@ -14353,6 +15061,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return 
(Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.TimestampsFilter) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.TimestampsFilter)other); @@ -14377,7 +15111,8 @@ public final class FilterProtos { if (other.hasCanHint()) { setCanHint(other.getCanHint()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -14394,7 +15129,7 @@ public final class FilterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.TimestampsFilter) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -14404,7 +15139,6 @@ public final class FilterProtos { } private int bitField0_; - // repeated int64 timestamps = 1 [packed = true]; private java.util.List timestamps_ = java.util.Collections.emptyList(); private void ensureTimestampsIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { @@ -14456,7 +15190,8 @@ public final class FilterProtos { public Builder addAllTimestamps( java.lang.Iterable values) { ensureTimestampsIsMutable(); - super.addAll(values, timestamps_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, timestamps_); onChanged(); return this; } @@ -14470,7 +15205,6 @@ public final class FilterProtos { return this; } - // optional bool can_hint = 2; private boolean canHint_ ; /** * optional bool can_hint = 2; @@ -14502,22 +15236,59 @@ public final class FilterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + 
} + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.TimestampsFilter) } + // @@protoc_insertion_point(class_scope:hbase.pb.TimestampsFilter) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.TimestampsFilter DEFAULT_INSTANCE; static { - defaultInstance = new TimestampsFilter(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.TimestampsFilter(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.TimestampsFilter getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public TimestampsFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TimestampsFilter(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.TimestampsFilter getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.TimestampsFilter) } - public interface ValueFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ValueFilterOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ValueFilter) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.CompareFilter compare_filter = 1; /** * required .hbase.pb.CompareFilter compare_filter = 1; */ @@ -14534,36 +15305,27 @@ public final class 
FilterProtos { /** * Protobuf type {@code hbase.pb.ValueFilter} */ - public static final class ValueFilter extends - com.google.protobuf.GeneratedMessage - implements ValueFilterOrBuilder { + public static final class ValueFilter extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ValueFilter) + ValueFilterOrBuilder { // Use ValueFilter.newBuilder() to construct. - private ValueFilter(com.google.protobuf.GeneratedMessage.Builder builder) { + private ValueFilter(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ValueFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ValueFilter defaultInstance; - public static ValueFilter getDefaultInstance() { - return defaultInstance; } - - public ValueFilter getDefaultInstanceForType() { - return defaultInstance; + private ValueFilter() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ValueFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -14601,7 +15363,7 @@ public final class FilterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -14612,30 +15374,14 @@ public final class FilterProtos { 
return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_ValueFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_ValueFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ValueFilter.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ValueFilter.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ValueFilter parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ValueFilter(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.CompareFilter compare_filter = 1; public static final int COMPARE_FILTER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter compareFilter_; /** @@ -14648,22 +15394,20 @@ public final class FilterProtos { * required .hbase.pb.CompareFilter compare_filter = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { - return compareFilter_; + return compareFilter_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance() : compareFilter_; } /** * required .hbase.pb.CompareFilter compare_filter = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { - return compareFilter_; + return compareFilter_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance() : compareFilter_; } - private void initFields() { - compareFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasCompareFilter()) { memoizedIsInitialized = 0; @@ -14679,36 +15423,28 @@ public final class FilterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, compareFilter_); + output.writeMessage(1, getCompareFilter()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, compareFilter_); + .computeMessageSize(1, getCompareFilter()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -14724,12 +15460,10 @@ public final class FilterProtos { result = result && getCompareFilter() .equals(other.getCompareFilter()); } - result = result && - 
getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -14741,7 +15475,7 @@ public final class FilterProtos { hash = (37 * hash) + COMPARE_FILTER_FIELD_NUMBER; hash = (53 * hash) + getCompareFilter().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -14769,46 +15503,57 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ValueFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ValueFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ValueFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ValueFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ValueFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ValueFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ValueFilter prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -14816,14 +15561,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.ValueFilter} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ValueFilterOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ValueFilter) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ValueFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_ValueFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_ValueFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -14836,23 +15582,20 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getCompareFilterFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if 
(compareFilterBuilder_ == null) { - compareFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + compareFilter_ = null; } else { compareFilterBuilder_.clear(); } @@ -14860,10 +15603,6 @@ public final class FilterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_ValueFilter_descriptor; @@ -14898,6 +15637,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ValueFilter) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ValueFilter)other); @@ -14912,17 +15677,16 @@ public final class FilterProtos { if (other.hasCompareFilter()) { mergeCompareFilter(other.getCompareFilter()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + 
onChanged(); return this; } public final boolean isInitialized() { if (!hasCompareFilter()) { - return false; } if (!getCompareFilter().isInitialized()) { - return false; } return true; @@ -14937,7 +15701,7 @@ public final class FilterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ValueFilter) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -14947,9 +15711,8 @@ public final class FilterProtos { } private int bitField0_; - // required .hbase.pb.CompareFilter compare_filter = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_; /** * required .hbase.pb.CompareFilter compare_filter = 1; @@ -14962,7 +15725,7 @@ public final class FilterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { if (compareFilterBuilder_ == null) { - return compareFilter_; + return compareFilter_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance() : compareFilter_; } else { return compareFilterBuilder_.getMessage(); } @@ -15003,6 +15766,7 @@ public final class FilterProtos { public Builder mergeCompareFilter(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter value) { if (compareFilterBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + compareFilter_ != null && compareFilter_ != org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) { compareFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial(); @@ -15021,7 +15785,7 @@ public final class FilterProtos { */ public Builder clearCompareFilter() { if (compareFilterBuilder_ == null) { - compareFilter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); + compareFilter_ = null; onChanged(); } else { compareFilterBuilder_.clear(); @@ -15044,41 +15808,79 @@ public final class FilterProtos { if (compareFilterBuilder_ != null) { return compareFilterBuilder_.getMessageOrBuilder(); } else { - return compareFilter_; + return compareFilter_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance() : compareFilter_; } } /** * required .hbase.pb.CompareFilter compare_filter = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilterOrBuilder> getCompareFilterFieldBuilder() { if (compareFilterBuilder_ == null) { - compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilterOrBuilder>( - compareFilter_, + getCompareFilter(), getParentForChildren(), isClean()); compareFilter_ = null; } return compareFilterBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ValueFilter) } - static { - defaultInstance = new ValueFilter(true); - defaultInstance.initFields(); + // @@protoc_insertion_point(class_scope:hbase.pb.ValueFilter) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ValueFilter DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ValueFilter(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ValueFilter 
getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ValueFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ValueFilter(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ValueFilter getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ValueFilter) } - public interface WhileMatchFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface WhileMatchFilterOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.WhileMatchFilter) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.Filter filter = 1; /** * required .hbase.pb.Filter filter = 1; */ @@ -15095,36 +15897,27 @@ public final class FilterProtos { /** * Protobuf type {@code hbase.pb.WhileMatchFilter} */ - public static final class WhileMatchFilter extends - com.google.protobuf.GeneratedMessage - implements WhileMatchFilterOrBuilder { + public static final class WhileMatchFilter extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.WhileMatchFilter) + WhileMatchFilterOrBuilder { // Use WhileMatchFilter.newBuilder() to construct. 
- private WhileMatchFilter(com.google.protobuf.GeneratedMessage.Builder builder) { + private WhileMatchFilter(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private WhileMatchFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final WhileMatchFilter defaultInstance; - public static WhileMatchFilter getDefaultInstance() { - return defaultInstance; } - - public WhileMatchFilter getDefaultInstanceForType() { - return defaultInstance; + private WhileMatchFilter() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private WhileMatchFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -15162,7 +15955,7 @@ public final class FilterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -15173,30 +15966,14 @@ public final class FilterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_WhileMatchFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_WhileMatchFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.WhileMatchFilter.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.WhileMatchFilter.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public WhileMatchFilter parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new WhileMatchFilter(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.Filter filter = 1; public static final int FILTER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter filter_; /** @@ -15209,22 +15986,20 @@ public final class FilterProtos { * required .hbase.pb.Filter filter = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter getFilter() { - return filter_; + return filter_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance() : filter_; } /** * required .hbase.pb.Filter filter = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() { - return filter_; + return filter_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance() : filter_; } - private void initFields() { - filter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasFilter()) { memoizedIsInitialized = 0; @@ -15240,36 +16015,28 @@ public final class FilterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, filter_); + output.writeMessage(1, getFilter()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, filter_); + .computeMessageSize(1, getFilter()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -15285,12 +16052,10 @@ public final class FilterProtos { result = result && getFilter() .equals(other.getFilter()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); 
return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -15302,7 +16067,7 @@ public final class FilterProtos { hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -15330,46 +16095,57 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.WhileMatchFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.WhileMatchFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( 
com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.WhileMatchFilter prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -15377,14 +16153,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.WhileMatchFilter} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.WhileMatchFilterOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.WhileMatchFilter) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.WhileMatchFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_WhileMatchFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_WhileMatchFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -15397,23 +16174,20 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getFilterFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { 
super.clear(); if (filterBuilder_ == null) { - filter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); + filter_ = null; } else { filterBuilder_.clear(); } @@ -15421,10 +16195,6 @@ public final class FilterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_WhileMatchFilter_descriptor; @@ -15459,6 +16229,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.WhileMatchFilter) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.WhileMatchFilter)other); @@ -15473,17 +16269,16 @@ public final class FilterProtos { if (other.hasFilter()) { mergeFilter(other.getFilter()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return 
this; } public final boolean isInitialized() { if (!hasFilter()) { - return false; } if (!getFilter().isInitialized()) { - return false; } return true; @@ -15498,7 +16293,7 @@ public final class FilterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.WhileMatchFilter) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -15508,9 +16303,8 @@ public final class FilterProtos { } private int bitField0_; - // required .hbase.pb.Filter filter = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter filter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter filter_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder> filterBuilder_; /** * required .hbase.pb.Filter filter = 1; @@ -15523,7 +16317,7 @@ public final class FilterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter getFilter() { if (filterBuilder_ == null) { - return filter_; + return filter_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance() : filter_; } else { return filterBuilder_.getMessage(); } @@ -15564,6 +16358,7 @@ public final class FilterProtos { public Builder mergeFilter(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter value) { if (filterBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + filter_ != null && filter_ != org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) { filter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial(); @@ -15582,7 +16377,7 @@ public final class FilterProtos { */ public Builder clearFilter() { if (filterBuilder_ == null) { - filter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); + filter_ = null; onChanged(); } else { filterBuilder_.clear(); @@ -15605,73 +16400,103 @@ public final class FilterProtos { if (filterBuilder_ != null) { return filterBuilder_.getMessageOrBuilder(); } else { - return filter_; + return filter_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance() : filter_; } } /** * required .hbase.pb.Filter filter = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder> getFilterFieldBuilder() { if (filterBuilder_ == null) { - filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + filterBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder>( - filter_, + getFilter(), getParentForChildren(), isClean()); filter_ = null; } return filterBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.WhileMatchFilter) } + // @@protoc_insertion_point(class_scope:hbase.pb.WhileMatchFilter) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.WhileMatchFilter DEFAULT_INSTANCE; static { - defaultInstance = new WhileMatchFilter(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.WhileMatchFilter(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.WhileMatchFilter getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final 
com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public WhileMatchFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new WhileMatchFilter(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.WhileMatchFilter getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.WhileMatchFilter) } - public interface FilterAllFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface FilterAllFilterOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.FilterAllFilter) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.FilterAllFilter} */ - public static final class FilterAllFilter extends - com.google.protobuf.GeneratedMessage - implements FilterAllFilterOrBuilder { + public static final class FilterAllFilter extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.FilterAllFilter) + FilterAllFilterOrBuilder { // Use FilterAllFilter.newBuilder() to construct. 
- private FilterAllFilter(com.google.protobuf.GeneratedMessage.Builder builder) { + private FilterAllFilter(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private FilterAllFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final FilterAllFilter defaultInstance; - public static FilterAllFilter getDefaultInstance() { - return defaultInstance; } - - public FilterAllFilter getDefaultInstanceForType() { - return defaultInstance; + private FilterAllFilter() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private FilterAllFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -15695,7 +16520,7 @@ public final class FilterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -15706,34 +16531,18 @@ public final class FilterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterAllFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterAllFilter_fieldAccessorTable 
.ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterAllFilter.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterAllFilter.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public FilterAllFilter parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new FilterAllFilter(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -15741,29 +16550,21 @@ public final class FilterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -15774,12 +16575,10 @@ public final class FilterProtos { 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterAllFilter other = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterAllFilter) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -15787,7 +16586,7 @@ public final class FilterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -15815,46 +16614,57 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterAllFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterAllFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterAllFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterAllFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterAllFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterAllFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterAllFilter prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -15862,14 +16672,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.FilterAllFilter} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterAllFilterOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.FilterAllFilter) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterAllFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterAllFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterAllFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -15882,27 +16693,20 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); return this; } - 
public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_FilterAllFilter_descriptor; @@ -15926,6 +16730,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterAllFilter) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterAllFilter)other); @@ -15937,7 +16767,8 @@ public final class FilterProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterAllFilter other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterAllFilter.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -15954,7 +16785,7 @@ public final class FilterProtos { parsedMessage = 
PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterAllFilter) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -15962,22 +16793,59 @@ public final class FilterProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.FilterAllFilter) } + // @@protoc_insertion_point(class_scope:hbase.pb.FilterAllFilter) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterAllFilter DEFAULT_INSTANCE; static { - defaultInstance = new FilterAllFilter(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterAllFilter(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterAllFilter getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public FilterAllFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FilterAllFilter(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterAllFilter getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.FilterAllFilter) } - public interface RowRangeOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RowRangeOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RowRange) + com.google.protobuf.MessageOrBuilder { - // optional bytes start_row = 1; /** * optional bytes start_row = 1; */ @@ -15987,7 +16855,6 @@ public final class FilterProtos { */ com.google.protobuf.ByteString getStartRow(); - // optional bool start_row_inclusive = 2; /** * optional bool start_row_inclusive = 2; */ @@ -15997,7 +16864,6 @@ public final class FilterProtos { */ boolean getStartRowInclusive(); - // optional bytes stop_row = 3; /** * optional bytes stop_row = 3; */ @@ -16007,7 +16873,6 @@ public final class FilterProtos { */ com.google.protobuf.ByteString getStopRow(); - // optional bool stop_row_inclusive = 4; /** * optional bool stop_row_inclusive = 4; */ @@ -16020,36 +16885,31 @@ public final class FilterProtos { /** * Protobuf type {@code hbase.pb.RowRange} */ - public static final class RowRange extends - com.google.protobuf.GeneratedMessage - implements RowRangeOrBuilder { + public static final class RowRange extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RowRange) + RowRangeOrBuilder { // Use RowRange.newBuilder() to construct. 
- private RowRange(com.google.protobuf.GeneratedMessage.Builder builder) { + private RowRange(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private RowRange(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RowRange defaultInstance; - public static RowRange getDefaultInstance() { - return defaultInstance; } - - public RowRange getDefaultInstanceForType() { - return defaultInstance; + private RowRange() { + startRow_ = com.google.protobuf.ByteString.EMPTY; + startRowInclusive_ = false; + stopRow_ = com.google.protobuf.ByteString.EMPTY; + stopRowInclusive_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RowRange( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -16094,7 +16954,7 @@ public final class FilterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -16105,30 +16965,14 @@ public final class FilterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowRange_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowRange_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRange.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRange.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RowRange parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RowRange(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional bytes start_row = 1; public static final int START_ROW_FIELD_NUMBER = 1; private com.google.protobuf.ByteString startRow_; /** @@ -16144,7 +16988,6 @@ public final class FilterProtos { return startRow_; } - // optional bool start_row_inclusive = 2; public static final int START_ROW_INCLUSIVE_FIELD_NUMBER = 2; private boolean startRowInclusive_; /** @@ -16160,7 +17003,6 @@ public final class FilterProtos { return startRowInclusive_; } - // optional bytes stop_row = 3; public static final int STOP_ROW_FIELD_NUMBER = 3; private com.google.protobuf.ByteString stopRow_; /** @@ -16176,7 +17018,6 @@ public final class FilterProtos { return stopRow_; } - // optional bool stop_row_inclusive = 4; public static final int STOP_ROW_INCLUSIVE_FIELD_NUMBER = 4; private boolean stopRowInclusive_; /** @@ -16192,16 +17033,11 @@ public final class FilterProtos { return stopRowInclusive_; } - private void initFields() { - startRow_ = com.google.protobuf.ByteString.EMPTY; - startRowInclusive_ = false; - stopRow_ = com.google.protobuf.ByteString.EMPTY; - stopRowInclusive_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte 
isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -16209,7 +17045,6 @@ public final class FilterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, startRow_); } @@ -16222,12 +17057,11 @@ public final class FilterProtos { if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeBool(4, stopRowInclusive_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -16247,19 +17081,13 @@ public final class FilterProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(4, stopRowInclusive_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -16290,12 +17118,10 @@ public final class FilterProtos { result = result && (getStopRowInclusive() == other.getStopRowInclusive()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -16309,7 +17135,8 @@ public final class FilterProtos { } if (hasStartRowInclusive()) { hash = (37 * hash) + 
START_ROW_INCLUSIVE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getStartRowInclusive()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getStartRowInclusive()); } if (hasStopRow()) { hash = (37 * hash) + STOP_ROW_FIELD_NUMBER; @@ -16317,9 +17144,10 @@ public final class FilterProtos { } if (hasStopRowInclusive()) { hash = (37 * hash) + STOP_ROW_INCLUSIVE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getStopRowInclusive()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getStopRowInclusive()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -16347,46 +17175,57 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRange parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRange parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRange parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRange parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + 
return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRange parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRange parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRange prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -16394,14 +17233,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.RowRange} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRangeOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RowRange) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRangeOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowRange_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowRange_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -16414,18 +17254,15 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); startRow_ = com.google.protobuf.ByteString.EMPTY; @@ 
-16439,10 +17276,6 @@ public final class FilterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_RowRange_descriptor; @@ -16485,6 +17318,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRange) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRange)other); @@ -16508,7 +17367,8 @@ public final class FilterProtos { if (other.hasStopRowInclusive()) { setStopRowInclusive(other.getStopRowInclusive()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -16525,7 +17385,7 @@ public final class FilterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { 
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRange) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -16535,7 +17395,6 @@ public final class FilterProtos { } private int bitField0_; - // optional bytes start_row = 1; private com.google.protobuf.ByteString startRow_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes start_row = 1; @@ -16571,7 +17430,6 @@ public final class FilterProtos { return this; } - // optional bool start_row_inclusive = 2; private boolean startRowInclusive_ ; /** * optional bool start_row_inclusive = 2; @@ -16604,7 +17462,6 @@ public final class FilterProtos { return this; } - // optional bytes stop_row = 3; private com.google.protobuf.ByteString stopRow_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes stop_row = 3; @@ -16640,7 +17497,6 @@ public final class FilterProtos { return this; } - // optional bool stop_row_inclusive = 4; private boolean stopRowInclusive_ ; /** * optional bool stop_row_inclusive = 4; @@ -16672,22 +17528,59 @@ public final class FilterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RowRange) } + // @@protoc_insertion_point(class_scope:hbase.pb.RowRange) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRange DEFAULT_INSTANCE; static { - defaultInstance = new RowRange(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRange(); + } + + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRange getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RowRange parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RowRange(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRange getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.RowRange) } - public interface MultiRowRangeFilterOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface MultiRowRangeFilterOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.MultiRowRangeFilter) + com.google.protobuf.MessageOrBuilder { - // repeated .hbase.pb.RowRange row_range_list = 1; /** * repeated .hbase.pb.RowRange row_range_list = 1; */ @@ -16715,36 +17608,28 @@ public final class FilterProtos { /** * Protobuf type {@code hbase.pb.MultiRowRangeFilter} */ - public static final class MultiRowRangeFilter extends - com.google.protobuf.GeneratedMessage - implements MultiRowRangeFilterOrBuilder { + public static final class MultiRowRangeFilter extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.MultiRowRangeFilter) + MultiRowRangeFilterOrBuilder { // Use MultiRowRangeFilter.newBuilder() to construct. 
- private MultiRowRangeFilter(com.google.protobuf.GeneratedMessage.Builder builder) { + private MultiRowRangeFilter(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private MultiRowRangeFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final MultiRowRangeFilter defaultInstance; - public static MultiRowRangeFilter getDefaultInstance() { - return defaultInstance; } - - public MultiRowRangeFilter getDefaultInstanceForType() { - return defaultInstance; + private MultiRowRangeFilter() { + rowRangeList_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private MultiRowRangeFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -16768,7 +17653,8 @@ public final class FilterProtos { rowRangeList_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } - rowRangeList_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRange.PARSER, extensionRegistry)); + rowRangeList_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRange.PARSER, extensionRegistry)); break; } } @@ -16777,7 +17663,7 @@ public final class FilterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if 
(((mutable_bitField0_ & 0x00000001) == 0x00000001)) { rowRangeList_ = java.util.Collections.unmodifiableList(rowRangeList_); @@ -16791,29 +17677,13 @@ public final class FilterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultiRowRangeFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultiRowRangeFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultiRowRangeFilter.class, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultiRowRangeFilter.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public MultiRowRangeFilter parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new MultiRowRangeFilter(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - // repeated .hbase.pb.RowRange row_range_list = 1; public static final int ROW_RANGE_LIST_FIELD_NUMBER = 1; private java.util.List rowRangeList_; /** @@ -16849,13 +17719,11 @@ public final class FilterProtos { return rowRangeList_.get(index); } - private void initFields() { - rowRangeList_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -16863,16 +17731,14 @@ public 
final class FilterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < rowRangeList_.size(); i++) { output.writeMessage(1, rowRangeList_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -16880,19 +17746,13 @@ public final class FilterProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, rowRangeList_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -16905,12 +17765,10 @@ public final class FilterProtos { boolean result = true; result = result && getRowRangeListList() .equals(other.getRowRangeListList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -16922,7 +17780,7 @@ public final class FilterProtos { hash = (37 * hash) + ROW_RANGE_LIST_FIELD_NUMBER; hash = (53 * hash) + getRowRangeListList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -16950,46 +17808,57 @@ public final class FilterProtos { } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultiRowRangeFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultiRowRangeFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultiRowRangeFilter prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -16997,14 +17866,15 @@ public final class FilterProtos { * Protobuf type {@code hbase.pb.MultiRowRangeFilter} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultiRowRangeFilterOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.MultiRowRangeFilter) + org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultiRowRangeFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultiRowRangeFilter_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultiRowRangeFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -17017,19 +17887,16 @@ public final class FilterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRowRangeListFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (rowRangeListBuilder_ == null) { @@ -17041,10 +17908,6 @@ public final class FilterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.internal_static_hbase_pb_MultiRowRangeFilter_descriptor; @@ -17078,6 +17941,32 @@ public final class FilterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + 
Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultiRowRangeFilter) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultiRowRangeFilter)other); @@ -17108,14 +17997,15 @@ public final class FilterProtos { rowRangeList_ = other.rowRangeList_; bitField0_ = (bitField0_ & ~0x00000001); rowRangeListBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getRowRangeListFieldBuilder() : null; } else { rowRangeListBuilder_.addAllMessages(other.rowRangeList_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -17132,7 +18022,7 @@ public final class FilterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultiRowRangeFilter) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -17142,7 +18032,6 @@ public final class FilterProtos { } private int bitField0_; - // repeated .hbase.pb.RowRange row_range_list = 1; private java.util.List rowRangeList_ = java.util.Collections.emptyList(); private void ensureRowRangeListIsMutable() { @@ -17152,7 +18041,7 @@ public final class FilterProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRange, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRangeOrBuilder> rowRangeListBuilder_; /** 
@@ -17284,7 +18173,8 @@ public final class FilterProtos { java.lang.Iterable values) { if (rowRangeListBuilder_ == null) { ensureRowRangeListIsMutable(); - super.addAll(values, rowRangeList_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, rowRangeList_); onChanged(); } else { rowRangeListBuilder_.addAllMessages(values); @@ -17367,11 +18257,11 @@ public final class FilterProtos { getRowRangeListBuilderList() { return getRowRangeListFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRange, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRangeOrBuilder> getRowRangeListFieldBuilder() { if (rowRangeListBuilder_ == null) { - rowRangeListBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + rowRangeListBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRange, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRangeOrBuilder>( rowRangeList_, ((bitField0_ & 0x00000001) == 0x00000001), @@ -17381,174 +18271,211 @@ public final class FilterProtos { } return rowRangeListBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.MultiRowRangeFilter) } + // @@protoc_insertion_point(class_scope:hbase.pb.MultiRowRangeFilter) + private static final 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultiRowRangeFilter DEFAULT_INSTANCE; static { - defaultInstance = new MultiRowRangeFilter(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultiRowRangeFilter(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultiRowRangeFilter getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public MultiRowRangeFilter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MultiRowRangeFilter(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultiRowRangeFilter getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.MultiRowRangeFilter) } - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_Filter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_Filter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ColumnCountGetFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ColumnCountGetFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ColumnPaginationFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ColumnPaginationFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ColumnPrefixFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ColumnPrefixFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ColumnRangeFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ColumnRangeFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_CompareFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_CompareFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_DependentColumnFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + 
private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_DependentColumnFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FamilyFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_FamilyFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FilterList_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_FilterList_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FilterWrapper_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_FilterWrapper_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FirstKeyOnlyFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_FirstKeyOnlyFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_descriptor; - private static - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FuzzyRowFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_FuzzyRowFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_InclusiveStopFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_InclusiveStopFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_KeyOnlyFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_KeyOnlyFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_MultipleColumnPrefixFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_MultipleColumnPrefixFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor 
internal_static_hbase_pb_PageFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_PageFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_PrefixFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_PrefixFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_QualifierFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_QualifierFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RandomRowFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RandomRowFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RowFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RowFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor 
internal_static_hbase_pb_SingleColumnValueExcludeFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SingleColumnValueExcludeFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SingleColumnValueFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SingleColumnValueFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SkipFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SkipFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_TimestampsFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_TimestampsFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ValueFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ValueFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final 
com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_WhileMatchFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_WhileMatchFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FilterAllFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_FilterAllFilter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RowRange_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RowRange_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_MultiRowRangeFilter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_MultiRowRangeFilter_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { @@ -17611,199 +18538,201 @@ public final class FilterProtos { "\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new 
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_hbase_pb_Filter_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_hbase_pb_Filter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_Filter_descriptor, - new java.lang.String[] { "Name", "SerializedFilter", }); - internal_static_hbase_pb_ColumnCountGetFilter_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_hbase_pb_ColumnCountGetFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ColumnCountGetFilter_descriptor, - new java.lang.String[] { "Limit", }); - internal_static_hbase_pb_ColumnPaginationFilter_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_hbase_pb_ColumnPaginationFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ColumnPaginationFilter_descriptor, - new java.lang.String[] { "Limit", "Offset", "ColumnOffset", }); - internal_static_hbase_pb_ColumnPrefixFilter_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_hbase_pb_ColumnPrefixFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ColumnPrefixFilter_descriptor, - new java.lang.String[] { "Prefix", }); - internal_static_hbase_pb_ColumnRangeFilter_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_hbase_pb_ColumnRangeFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ColumnRangeFilter_descriptor, - new java.lang.String[] { "MinColumn", "MinColumnInclusive", "MaxColumn", "MaxColumnInclusive", }); - 
internal_static_hbase_pb_CompareFilter_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_hbase_pb_CompareFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_CompareFilter_descriptor, - new java.lang.String[] { "CompareOp", "Comparator", }); - internal_static_hbase_pb_DependentColumnFilter_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_hbase_pb_DependentColumnFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_DependentColumnFilter_descriptor, - new java.lang.String[] { "CompareFilter", "ColumnFamily", "ColumnQualifier", "DropDependentColumn", }); - internal_static_hbase_pb_FamilyFilter_descriptor = - getDescriptor().getMessageTypes().get(7); - internal_static_hbase_pb_FamilyFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_FamilyFilter_descriptor, - new java.lang.String[] { "CompareFilter", }); - internal_static_hbase_pb_FilterList_descriptor = - getDescriptor().getMessageTypes().get(8); - internal_static_hbase_pb_FilterList_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_FilterList_descriptor, - new java.lang.String[] { "Operator", "Filters", }); - internal_static_hbase_pb_FilterWrapper_descriptor = - getDescriptor().getMessageTypes().get(9); - internal_static_hbase_pb_FilterWrapper_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_FilterWrapper_descriptor, - new java.lang.String[] { "Filter", }); - internal_static_hbase_pb_FirstKeyOnlyFilter_descriptor = - getDescriptor().getMessageTypes().get(10); - internal_static_hbase_pb_FirstKeyOnlyFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_FirstKeyOnlyFilter_descriptor, - new 
java.lang.String[] { }); - internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_descriptor = - getDescriptor().getMessageTypes().get(11); - internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_descriptor, - new java.lang.String[] { "Qualifiers", }); - internal_static_hbase_pb_FuzzyRowFilter_descriptor = - getDescriptor().getMessageTypes().get(12); - internal_static_hbase_pb_FuzzyRowFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_FuzzyRowFilter_descriptor, - new java.lang.String[] { "FuzzyKeysData", }); - internal_static_hbase_pb_InclusiveStopFilter_descriptor = - getDescriptor().getMessageTypes().get(13); - internal_static_hbase_pb_InclusiveStopFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_InclusiveStopFilter_descriptor, - new java.lang.String[] { "StopRowKey", }); - internal_static_hbase_pb_KeyOnlyFilter_descriptor = - getDescriptor().getMessageTypes().get(14); - internal_static_hbase_pb_KeyOnlyFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_KeyOnlyFilter_descriptor, - new java.lang.String[] { "LenAsVal", }); - internal_static_hbase_pb_MultipleColumnPrefixFilter_descriptor = - getDescriptor().getMessageTypes().get(15); - internal_static_hbase_pb_MultipleColumnPrefixFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_MultipleColumnPrefixFilter_descriptor, - new java.lang.String[] { "SortedPrefixes", }); - internal_static_hbase_pb_PageFilter_descriptor = - getDescriptor().getMessageTypes().get(16); - internal_static_hbase_pb_PageFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - 
internal_static_hbase_pb_PageFilter_descriptor, - new java.lang.String[] { "PageSize", }); - internal_static_hbase_pb_PrefixFilter_descriptor = - getDescriptor().getMessageTypes().get(17); - internal_static_hbase_pb_PrefixFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_PrefixFilter_descriptor, - new java.lang.String[] { "Prefix", }); - internal_static_hbase_pb_QualifierFilter_descriptor = - getDescriptor().getMessageTypes().get(18); - internal_static_hbase_pb_QualifierFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_QualifierFilter_descriptor, - new java.lang.String[] { "CompareFilter", }); - internal_static_hbase_pb_RandomRowFilter_descriptor = - getDescriptor().getMessageTypes().get(19); - internal_static_hbase_pb_RandomRowFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RandomRowFilter_descriptor, - new java.lang.String[] { "Chance", }); - internal_static_hbase_pb_RowFilter_descriptor = - getDescriptor().getMessageTypes().get(20); - internal_static_hbase_pb_RowFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RowFilter_descriptor, - new java.lang.String[] { "CompareFilter", }); - internal_static_hbase_pb_SingleColumnValueExcludeFilter_descriptor = - getDescriptor().getMessageTypes().get(21); - internal_static_hbase_pb_SingleColumnValueExcludeFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_SingleColumnValueExcludeFilter_descriptor, - new java.lang.String[] { "SingleColumnValueFilter", }); - internal_static_hbase_pb_SingleColumnValueFilter_descriptor = - getDescriptor().getMessageTypes().get(22); - internal_static_hbase_pb_SingleColumnValueFilter_fieldAccessorTable = new - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_SingleColumnValueFilter_descriptor, - new java.lang.String[] { "ColumnFamily", "ColumnQualifier", "CompareOp", "Comparator", "FilterIfMissing", "LatestVersionOnly", }); - internal_static_hbase_pb_SkipFilter_descriptor = - getDescriptor().getMessageTypes().get(23); - internal_static_hbase_pb_SkipFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_SkipFilter_descriptor, - new java.lang.String[] { "Filter", }); - internal_static_hbase_pb_TimestampsFilter_descriptor = - getDescriptor().getMessageTypes().get(24); - internal_static_hbase_pb_TimestampsFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_TimestampsFilter_descriptor, - new java.lang.String[] { "Timestamps", "CanHint", }); - internal_static_hbase_pb_ValueFilter_descriptor = - getDescriptor().getMessageTypes().get(25); - internal_static_hbase_pb_ValueFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ValueFilter_descriptor, - new java.lang.String[] { "CompareFilter", }); - internal_static_hbase_pb_WhileMatchFilter_descriptor = - getDescriptor().getMessageTypes().get(26); - internal_static_hbase_pb_WhileMatchFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_WhileMatchFilter_descriptor, - new java.lang.String[] { "Filter", }); - internal_static_hbase_pb_FilterAllFilter_descriptor = - getDescriptor().getMessageTypes().get(27); - internal_static_hbase_pb_FilterAllFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_FilterAllFilter_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_RowRange_descriptor = - getDescriptor().getMessageTypes().get(28); - 
internal_static_hbase_pb_RowRange_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RowRange_descriptor, - new java.lang.String[] { "StartRow", "StartRowInclusive", "StopRow", "StopRowInclusive", }); - internal_static_hbase_pb_MultiRowRangeFilter_descriptor = - getDescriptor().getMessageTypes().get(29); - internal_static_hbase_pb_MultiRowRangeFilter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_MultiRowRangeFilter_descriptor, - new java.lang.String[] { "RowRangeList", }); - return null; - } - }; + new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(), org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.getDescriptor(), }, assigner); + internal_static_hbase_pb_Filter_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_hbase_pb_Filter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_Filter_descriptor, + new java.lang.String[] { "Name", "SerializedFilter", }); + internal_static_hbase_pb_ColumnCountGetFilter_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_hbase_pb_ColumnCountGetFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ColumnCountGetFilter_descriptor, + new java.lang.String[] { "Limit", }); + internal_static_hbase_pb_ColumnPaginationFilter_descriptor = + getDescriptor().getMessageTypes().get(2); + 
internal_static_hbase_pb_ColumnPaginationFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ColumnPaginationFilter_descriptor, + new java.lang.String[] { "Limit", "Offset", "ColumnOffset", }); + internal_static_hbase_pb_ColumnPrefixFilter_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_hbase_pb_ColumnPrefixFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ColumnPrefixFilter_descriptor, + new java.lang.String[] { "Prefix", }); + internal_static_hbase_pb_ColumnRangeFilter_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_hbase_pb_ColumnRangeFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ColumnRangeFilter_descriptor, + new java.lang.String[] { "MinColumn", "MinColumnInclusive", "MaxColumn", "MaxColumnInclusive", }); + internal_static_hbase_pb_CompareFilter_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_hbase_pb_CompareFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_CompareFilter_descriptor, + new java.lang.String[] { "CompareOp", "Comparator", }); + internal_static_hbase_pb_DependentColumnFilter_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_hbase_pb_DependentColumnFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_DependentColumnFilter_descriptor, + new java.lang.String[] { "CompareFilter", "ColumnFamily", "ColumnQualifier", "DropDependentColumn", }); + internal_static_hbase_pb_FamilyFilter_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_hbase_pb_FamilyFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + 
internal_static_hbase_pb_FamilyFilter_descriptor, + new java.lang.String[] { "CompareFilter", }); + internal_static_hbase_pb_FilterList_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_hbase_pb_FilterList_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_FilterList_descriptor, + new java.lang.String[] { "Operator", "Filters", }); + internal_static_hbase_pb_FilterWrapper_descriptor = + getDescriptor().getMessageTypes().get(9); + internal_static_hbase_pb_FilterWrapper_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_FilterWrapper_descriptor, + new java.lang.String[] { "Filter", }); + internal_static_hbase_pb_FirstKeyOnlyFilter_descriptor = + getDescriptor().getMessageTypes().get(10); + internal_static_hbase_pb_FirstKeyOnlyFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_FirstKeyOnlyFilter_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_descriptor = + getDescriptor().getMessageTypes().get(11); + internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_FirstKeyValueMatchingQualifiersFilter_descriptor, + new java.lang.String[] { "Qualifiers", }); + internal_static_hbase_pb_FuzzyRowFilter_descriptor = + getDescriptor().getMessageTypes().get(12); + internal_static_hbase_pb_FuzzyRowFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_FuzzyRowFilter_descriptor, + new java.lang.String[] { "FuzzyKeysData", }); + internal_static_hbase_pb_InclusiveStopFilter_descriptor = + getDescriptor().getMessageTypes().get(13); + internal_static_hbase_pb_InclusiveStopFilter_fieldAccessorTable = new + 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_InclusiveStopFilter_descriptor, + new java.lang.String[] { "StopRowKey", }); + internal_static_hbase_pb_KeyOnlyFilter_descriptor = + getDescriptor().getMessageTypes().get(14); + internal_static_hbase_pb_KeyOnlyFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_KeyOnlyFilter_descriptor, + new java.lang.String[] { "LenAsVal", }); + internal_static_hbase_pb_MultipleColumnPrefixFilter_descriptor = + getDescriptor().getMessageTypes().get(15); + internal_static_hbase_pb_MultipleColumnPrefixFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_MultipleColumnPrefixFilter_descriptor, + new java.lang.String[] { "SortedPrefixes", }); + internal_static_hbase_pb_PageFilter_descriptor = + getDescriptor().getMessageTypes().get(16); + internal_static_hbase_pb_PageFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_PageFilter_descriptor, + new java.lang.String[] { "PageSize", }); + internal_static_hbase_pb_PrefixFilter_descriptor = + getDescriptor().getMessageTypes().get(17); + internal_static_hbase_pb_PrefixFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_PrefixFilter_descriptor, + new java.lang.String[] { "Prefix", }); + internal_static_hbase_pb_QualifierFilter_descriptor = + getDescriptor().getMessageTypes().get(18); + internal_static_hbase_pb_QualifierFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_QualifierFilter_descriptor, + new java.lang.String[] { "CompareFilter", }); + internal_static_hbase_pb_RandomRowFilter_descriptor = + getDescriptor().getMessageTypes().get(19); + internal_static_hbase_pb_RandomRowFilter_fieldAccessorTable = new + 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_RandomRowFilter_descriptor, + new java.lang.String[] { "Chance", }); + internal_static_hbase_pb_RowFilter_descriptor = + getDescriptor().getMessageTypes().get(20); + internal_static_hbase_pb_RowFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_RowFilter_descriptor, + new java.lang.String[] { "CompareFilter", }); + internal_static_hbase_pb_SingleColumnValueExcludeFilter_descriptor = + getDescriptor().getMessageTypes().get(21); + internal_static_hbase_pb_SingleColumnValueExcludeFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_SingleColumnValueExcludeFilter_descriptor, + new java.lang.String[] { "SingleColumnValueFilter", }); + internal_static_hbase_pb_SingleColumnValueFilter_descriptor = + getDescriptor().getMessageTypes().get(22); + internal_static_hbase_pb_SingleColumnValueFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_SingleColumnValueFilter_descriptor, + new java.lang.String[] { "ColumnFamily", "ColumnQualifier", "CompareOp", "Comparator", "FilterIfMissing", "LatestVersionOnly", }); + internal_static_hbase_pb_SkipFilter_descriptor = + getDescriptor().getMessageTypes().get(23); + internal_static_hbase_pb_SkipFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_SkipFilter_descriptor, + new java.lang.String[] { "Filter", }); + internal_static_hbase_pb_TimestampsFilter_descriptor = + getDescriptor().getMessageTypes().get(24); + internal_static_hbase_pb_TimestampsFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_TimestampsFilter_descriptor, + new java.lang.String[] { "Timestamps", "CanHint", }); + 
internal_static_hbase_pb_ValueFilter_descriptor = + getDescriptor().getMessageTypes().get(25); + internal_static_hbase_pb_ValueFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ValueFilter_descriptor, + new java.lang.String[] { "CompareFilter", }); + internal_static_hbase_pb_WhileMatchFilter_descriptor = + getDescriptor().getMessageTypes().get(26); + internal_static_hbase_pb_WhileMatchFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_WhileMatchFilter_descriptor, + new java.lang.String[] { "Filter", }); + internal_static_hbase_pb_FilterAllFilter_descriptor = + getDescriptor().getMessageTypes().get(27); + internal_static_hbase_pb_FilterAllFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_FilterAllFilter_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_RowRange_descriptor = + getDescriptor().getMessageTypes().get(28); + internal_static_hbase_pb_RowRange_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_RowRange_descriptor, + new java.lang.String[] { "StartRow", "StartRowInclusive", "StopRow", "StopRowInclusive", }); + internal_static_hbase_pb_MultiRowRangeFilter_descriptor = + getDescriptor().getMessageTypes().get(29); + internal_static_hbase_pb_MultiRowRangeFilter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_MultiRowRangeFilter_descriptor, + new java.lang.String[] { "RowRangeList", }); + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(); + org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HBaseProtos.java 
b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HBaseProtos.java index 1259d3c..2ae9f9c 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HBaseProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HBaseProtos.java @@ -6,45 +6,51 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated; public final class HBaseProtos { private HBaseProtos() {} public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); } /** - * Protobuf enum {@code hbase.pb.CompareType} - * *
    * Comparison operators 
    * 
+ * + * Protobuf enum {@code hbase.pb.CompareType} */ public enum CompareType implements com.google.protobuf.ProtocolMessageEnum { /** * LESS = 0; */ - LESS(0, 0), + LESS(0), /** * LESS_OR_EQUAL = 1; */ - LESS_OR_EQUAL(1, 1), + LESS_OR_EQUAL(1), /** * EQUAL = 2; */ - EQUAL(2, 2), + EQUAL(2), /** * NOT_EQUAL = 3; */ - NOT_EQUAL(3, 3), + NOT_EQUAL(3), /** * GREATER_OR_EQUAL = 4; */ - GREATER_OR_EQUAL(4, 4), + GREATER_OR_EQUAL(4), /** * GREATER = 5; */ - GREATER(5, 5), + GREATER(5), /** * NO_OP = 6; */ - NO_OP(6, 6), + NO_OP(6), ; /** @@ -77,9 +83,19 @@ public final class HBaseProtos { public static final int NO_OP_VALUE = 6; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static CompareType valueOf(int value) { + return forNumber(value); + } + + public static CompareType forNumber(int value) { switch (value) { case 0: return LESS; case 1: return LESS_OR_EQUAL; @@ -96,17 +112,17 @@ public final class HBaseProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + CompareType> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public CompareType findValueByNumber(int number) { - return CompareType.valueOf(number); + return CompareType.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -128,11 +144,9 @@ public final class HBaseProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private CompareType(int index, int value) { - this.index = index; + private CompareType(int 
value) { this.value = value; } @@ -147,31 +161,31 @@ public final class HBaseProtos { /** * NANOSECONDS = 1; */ - NANOSECONDS(0, 1), + NANOSECONDS(1), /** * MICROSECONDS = 2; */ - MICROSECONDS(1, 2), + MICROSECONDS(2), /** * MILLISECONDS = 3; */ - MILLISECONDS(2, 3), + MILLISECONDS(3), /** * SECONDS = 4; */ - SECONDS(3, 4), + SECONDS(4), /** * MINUTES = 5; */ - MINUTES(4, 5), + MINUTES(5), /** * HOURS = 6; */ - HOURS(5, 6), + HOURS(6), /** * DAYS = 7; */ - DAYS(6, 7), + DAYS(7), ; /** @@ -204,9 +218,19 @@ public final class HBaseProtos { public static final int DAYS_VALUE = 7; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static TimeUnit valueOf(int value) { + return forNumber(value); + } + + public static TimeUnit forNumber(int value) { switch (value) { case 1: return NANOSECONDS; case 2: return MICROSECONDS; @@ -223,17 +247,17 @@ public final class HBaseProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + TimeUnit> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public TimeUnit findValueByNumber(int number) { - return TimeUnit.valueOf(number); + return TimeUnit.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -255,21 +279,19 @@ public final class HBaseProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private TimeUnit(int index, int value) { - this.index = index; + private TimeUnit(int value) { this.value = value; } // 
@@protoc_insertion_point(enum_scope:hbase.pb.TimeUnit) } - public interface TableNameOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface TableNameOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.TableName) + com.google.protobuf.MessageOrBuilder { - // required bytes namespace = 1; /** * required bytes namespace = 1; */ @@ -279,7 +301,6 @@ public final class HBaseProtos { */ com.google.protobuf.ByteString getNamespace(); - // required bytes qualifier = 2; /** * required bytes qualifier = 2; */ @@ -290,43 +311,36 @@ public final class HBaseProtos { com.google.protobuf.ByteString getQualifier(); } /** - * Protobuf type {@code hbase.pb.TableName} - * *
    **
    * Table Name
    * 
+ * + * Protobuf type {@code hbase.pb.TableName} */ - public static final class TableName extends - com.google.protobuf.GeneratedMessage - implements TableNameOrBuilder { + public static final class TableName extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.TableName) + TableNameOrBuilder { // Use TableName.newBuilder() to construct. - private TableName(com.google.protobuf.GeneratedMessage.Builder builder) { + private TableName(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private TableName(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final TableName defaultInstance; - public static TableName getDefaultInstance() { - return defaultInstance; } - - public TableName getDefaultInstanceForType() { - return defaultInstance; + private TableName() { + namespace_ = com.google.protobuf.ByteString.EMPTY; + qualifier_ = com.google.protobuf.ByteString.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private TableName( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -361,7 +375,7 @@ public final class HBaseProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ 
-372,30 +386,14 @@ public final class HBaseProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableName_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableName_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public TableName parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new TableName(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bytes namespace = 1; public static final int NAMESPACE_FIELD_NUMBER = 1; private com.google.protobuf.ByteString namespace_; /** @@ -411,7 +409,6 @@ public final class HBaseProtos { return namespace_; } - // required bytes qualifier = 2; public static final int QUALIFIER_FIELD_NUMBER = 2; private com.google.protobuf.ByteString qualifier_; /** @@ -427,14 +424,11 @@ public final class HBaseProtos { return qualifier_; } - private void initFields() { - namespace_ = com.google.protobuf.ByteString.EMPTY; - qualifier_ = com.google.protobuf.ByteString.EMPTY; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if 
(!hasNamespace()) { memoizedIsInitialized = 0; @@ -450,19 +444,17 @@ public final class HBaseProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, namespace_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, qualifier_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -474,19 +466,13 @@ public final class HBaseProtos { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, qualifier_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -507,12 +493,10 @@ public final class HBaseProtos { result = result && getQualifier() .equals(other.getQualifier()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -528,7 +512,7 @@ public final class HBaseProtos { hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; hash = (53 * hash) + getQualifier().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -556,66 +540,78 @@ public final class HBaseProtos { } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.TableName} - * *
      **
      * Table Name
      * 
+ * + * Protobuf type {@code hbase.pb.TableName} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.TableName) + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableName_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableName_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -628,18 +624,15 @@ public final class HBaseProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); namespace_ = com.google.protobuf.ByteString.EMPTY; @@ -649,10 +642,6 @@ public final class HBaseProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableName_descriptor; @@ -687,6 +676,32 @@ public final class HBaseProtos { return result; } + 
public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName)other); @@ -704,17 +719,16 @@ public final class HBaseProtos { if (other.hasQualifier()) { setQualifier(other.getQualifier()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasNamespace()) { - return false; } if (!hasQualifier()) { - return false; } return true; @@ -729,7 +743,7 @@ public final class HBaseProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -739,7 +753,6 @@ public final class HBaseProtos { } private int bitField0_; - // required 
bytes namespace = 1; private com.google.protobuf.ByteString namespace_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes namespace = 1; @@ -775,7 +788,6 @@ public final class HBaseProtos { return this; } - // required bytes qualifier = 2; private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes qualifier = 2; @@ -810,22 +822,59 @@ public final class HBaseProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.TableName) } + // @@protoc_insertion_point(class_scope:hbase.pb.TableName) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName DEFAULT_INSTANCE; static { - defaultInstance = new TableName(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public TableName parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TableName(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName 
getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.TableName) } - public interface TableSchemaOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface TableSchemaOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.TableSchema) + com.google.protobuf.MessageOrBuilder { - // optional .hbase.pb.TableName table_name = 1; /** * optional .hbase.pb.TableName table_name = 1; */ @@ -839,7 +888,6 @@ public final class HBaseProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); - // repeated .hbase.pb.BytesBytesPair attributes = 2; /** * repeated .hbase.pb.BytesBytesPair attributes = 2; */ @@ -864,7 +912,6 @@ public final class HBaseProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder( int index); - // repeated .hbase.pb.ColumnFamilySchema column_families = 3; /** * repeated .hbase.pb.ColumnFamilySchema column_families = 3; */ @@ -889,7 +936,6 @@ public final class HBaseProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder( int index); - // repeated .hbase.pb.NameStringPair configuration = 4; /** * repeated .hbase.pb.NameStringPair configuration = 4; */ @@ -915,44 +961,38 @@ public final class HBaseProtos { int index); } /** - * Protobuf type {@code hbase.pb.TableSchema} - * *
    **
    * Table Schema
    * Inspired by the rest TableSchema
    * 
+ * + * Protobuf type {@code hbase.pb.TableSchema} */ - public static final class TableSchema extends - com.google.protobuf.GeneratedMessage - implements TableSchemaOrBuilder { + public static final class TableSchema extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.TableSchema) + TableSchemaOrBuilder { // Use TableSchema.newBuilder() to construct. - private TableSchema(com.google.protobuf.GeneratedMessage.Builder builder) { + private TableSchema(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private TableSchema(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final TableSchema defaultInstance; - public static TableSchema getDefaultInstance() { - return defaultInstance; - } - - public TableSchema getDefaultInstanceForType() { - return defaultInstance; + private TableSchema() { + attributes_ = java.util.Collections.emptyList(); + columnFamilies_ = java.util.Collections.emptyList(); + configuration_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private TableSchema( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -989,7 +1029,8 @@ public final class HBaseProtos { attributes_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } - attributes_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, 
extensionRegistry)); + attributes_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry)); break; } case 26: { @@ -997,7 +1038,8 @@ public final class HBaseProtos { columnFamilies_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000004; } - columnFamilies_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.PARSER, extensionRegistry)); + columnFamilies_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.PARSER, extensionRegistry)); break; } case 34: { @@ -1005,7 +1047,8 @@ public final class HBaseProtos { configuration_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000008; } - configuration_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry)); + configuration_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry)); break; } } @@ -1014,7 +1057,7 @@ public final class HBaseProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { attributes_ = java.util.Collections.unmodifiableList(attributes_); @@ -1034,30 +1077,14 @@ public final class HBaseProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public TableSchema parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new TableSchema(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional .hbase.pb.TableName table_name = 1; public static final int TABLE_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** @@ -1070,16 +1097,15 @@ public final class HBaseProtos { * optional .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * optional .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { - return tableName_; + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } - // repeated .hbase.pb.BytesBytesPair attributes = 2; public static final int ATTRIBUTES_FIELD_NUMBER = 2; private java.util.List attributes_; /** @@ -1115,7 +1141,6 @@ public final class HBaseProtos { return attributes_.get(index); } - // repeated .hbase.pb.ColumnFamilySchema column_families = 3; public static final int COLUMN_FAMILIES_FIELD_NUMBER = 3; private java.util.List columnFamilies_; /** @@ -1151,7 +1176,6 @@ public final class HBaseProtos { return columnFamilies_.get(index); } - // repeated .hbase.pb.NameStringPair configuration = 4; public static final int CONFIGURATION_FIELD_NUMBER = 4; private java.util.List configuration_; /** @@ -1187,16 +1211,11 @@ public final class HBaseProtos { return configuration_.get(index); } - private void initFields() { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - attributes_ = java.util.Collections.emptyList(); - columnFamilies_ = java.util.Collections.emptyList(); - configuration_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (hasTableName()) { if (!getTableName().isInitialized()) { @@ -1228,9 +1247,8 @@ public final class HBaseProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, tableName_); + output.writeMessage(1, getTableName()); } for (int i = 0; i < attributes_.size(); i++) { output.writeMessage(2, attributes_.get(i)); @@ -1241,18 +1259,17 @@ public final class HBaseProtos { for (int i = 0; i < configuration_.size(); i++) { output.writeMessage(4, 
configuration_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, tableName_); + .computeMessageSize(1, getTableName()); } for (int i = 0; i < attributes_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -1266,19 +1283,13 @@ public final class HBaseProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(4, configuration_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -1300,12 +1311,10 @@ public final class HBaseProtos { .equals(other.getColumnFamiliesList()); result = result && getConfigurationList() .equals(other.getConfigurationList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -1329,7 +1338,7 @@ public final class HBaseProtos { hash = (37 * hash) + CONFIGURATION_FIELD_NUMBER; hash = (53 * hash) + getConfigurationList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -1357,67 +1366,79 @@ public final class HBaseProtos { } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.TableSchema} - * *
      **
      * Table Schema
      * Inspired by the rest TableSchema
      * 
+ * + * Protobuf type {@code hbase.pb.TableSchema} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.TableSchema) + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -1430,26 +1451,23 @@ public final class HBaseProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTableNameFieldBuilder(); getAttributesFieldBuilder(); getColumnFamiliesFieldBuilder(); getConfigurationFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; } else { tableNameBuilder_.clear(); } @@ -1475,10 +1493,6 @@ public final class HBaseProtos { return this; } - public Builder clone() { - return 
create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_descriptor; @@ -1540,6 +1554,32 @@ public final class HBaseProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema)other); @@ -1573,7 +1613,7 @@ public final class HBaseProtos { attributes_ = other.attributes_; bitField0_ = (bitField0_ & ~0x00000002); attributesBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getAttributesFieldBuilder() : null; } else { attributesBuilder_.addAllMessages(other.attributes_); @@ -1599,7 +1639,7 @@ public final class HBaseProtos { columnFamilies_ = other.columnFamilies_; bitField0_ = (bitField0_ & ~0x00000004); columnFamiliesBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getColumnFamiliesFieldBuilder() : null; } else { columnFamiliesBuilder_.addAllMessages(other.columnFamilies_); @@ -1625,39 +1665,36 @@ public final class HBaseProtos { configuration_ = other.configuration_; bitField0_ = (bitField0_ & ~0x00000008); configurationBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getConfigurationFieldBuilder() : null; } else { configurationBuilder_.addAllMessages(other.configuration_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (hasTableName()) { if (!getTableName().isInitialized()) { - return false; } } for (int i = 0; i < getAttributesCount(); i++) { if (!getAttributes(i).isInitialized()) { - return false; } } for (int i = 0; i < getColumnFamiliesCount(); i++) { if (!getColumnFamilies(i).isInitialized()) { - return false; } } for (int i = 0; i < getConfigurationCount(); i++) { if (!getConfiguration(i).isInitialized()) { - return false; } } @@ -1673,7 +1710,7 @@ public final class HBaseProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -1683,9 +1720,8 @@ public final class HBaseProtos { } private int bitField0_; - 
// optional .hbase.pb.TableName table_name = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * optional .hbase.pb.TableName table_name = 1; @@ -1698,7 +1734,7 @@ public final class HBaseProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } @@ -1739,6 +1775,7 @@ public final class HBaseProtos { public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); @@ -1757,7 +1794,7 @@ public final class HBaseProtos { */ public Builder clearTableName() { if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); @@ -1780,19 +1817,20 @@ public 
final class HBaseProtos { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { - return tableName_; + return tableName_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * optional .hbase.pb.TableName table_name = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { - tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( - tableName_, + getTableName(), getParentForChildren(), isClean()); tableName_ = null; @@ -1800,7 +1838,6 @@ public final class HBaseProtos { return tableNameBuilder_; } - // repeated .hbase.pb.BytesBytesPair attributes = 2; private java.util.List attributes_ = java.util.Collections.emptyList(); private void ensureAttributesIsMutable() { @@ -1810,7 +1847,7 @@ public final class HBaseProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> attributesBuilder_; /** @@ -1942,7 +1979,8 @@ public final class HBaseProtos { java.lang.Iterable values) { if (attributesBuilder_ == 
null) { ensureAttributesIsMutable(); - super.addAll(values, attributes_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, attributes_); onChanged(); } else { attributesBuilder_.addAllMessages(values); @@ -2025,11 +2063,11 @@ public final class HBaseProtos { getAttributesBuilderList() { return getAttributesFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getAttributesFieldBuilder() { if (attributesBuilder_ == null) { - attributesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + attributesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>( attributes_, ((bitField0_ & 0x00000002) == 0x00000002), @@ -2040,7 +2078,6 @@ public final class HBaseProtos { return attributesBuilder_; } - // repeated .hbase.pb.ColumnFamilySchema column_families = 3; private java.util.List columnFamilies_ = java.util.Collections.emptyList(); private void ensureColumnFamiliesIsMutable() { @@ -2050,7 +2087,7 @@ public final class HBaseProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> columnFamiliesBuilder_; /** @@ -2182,7 +2219,8 @@ public final class 
HBaseProtos { java.lang.Iterable values) { if (columnFamiliesBuilder_ == null) { ensureColumnFamiliesIsMutable(); - super.addAll(values, columnFamilies_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, columnFamilies_); onChanged(); } else { columnFamiliesBuilder_.addAllMessages(values); @@ -2265,11 +2303,11 @@ public final class HBaseProtos { getColumnFamiliesBuilderList() { return getColumnFamiliesFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> getColumnFamiliesFieldBuilder() { if (columnFamiliesBuilder_ == null) { - columnFamiliesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + columnFamiliesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>( columnFamilies_, ((bitField0_ & 0x00000004) == 0x00000004), @@ -2280,7 +2318,6 @@ public final class HBaseProtos { return columnFamiliesBuilder_; } - // repeated .hbase.pb.NameStringPair configuration = 4; private java.util.List configuration_ = java.util.Collections.emptyList(); private void ensureConfigurationIsMutable() { @@ -2290,7 +2327,7 @@ public final class HBaseProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> configurationBuilder_; /** @@ -2422,7 +2459,8 @@ public final class HBaseProtos { java.lang.Iterable values) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); - super.addAll(values, configuration_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, configuration_); onChanged(); } else { configurationBuilder_.addAllMessages(values); @@ -2505,11 +2543,11 @@ public final class HBaseProtos { getConfigurationBuilderList() { return getConfigurationFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationFieldBuilder() { if (configurationBuilder_ == null) { - configurationBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + configurationBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>( configuration_, ((bitField0_ & 0x00000008) == 0x00000008), @@ -2519,76 +2557,105 @@ public final class HBaseProtos { } return configurationBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.TableSchema) } + // 
@@protoc_insertion_point(class_scope:hbase.pb.TableSchema) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema DEFAULT_INSTANCE; static { - defaultInstance = new TableSchema(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public TableSchema parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TableSchema(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.TableSchema) } - public interface TableStateOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface TableStateOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.TableState) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.TableState.State state = 1; /** - * required .hbase.pb.TableState.State state = 1; - * *
      * This is the table's state.
      * 
+ * + * required .hbase.pb.TableState.State state = 1; */ boolean hasState(); /** - * required .hbase.pb.TableState.State state = 1; - * *
      * This is the table's state.
      * 
+ * + * required .hbase.pb.TableState.State state = 1; */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State getState(); } /** - * Protobuf type {@code hbase.pb.TableState} - * *
    ** Denotes state of the table 
    * 
+ * + * Protobuf type {@code hbase.pb.TableState} */ - public static final class TableState extends - com.google.protobuf.GeneratedMessage - implements TableStateOrBuilder { + public static final class TableState extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.TableState) + TableStateOrBuilder { // Use TableState.newBuilder() to construct. - private TableState(com.google.protobuf.GeneratedMessage.Builder builder) { + private TableState(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private TableState(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final TableState defaultInstance; - public static TableState getDefaultInstance() { - return defaultInstance; } - - public TableState getDefaultInstanceForType() { - return defaultInstance; + private TableState() { + state_ = 0; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private TableState( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -2614,7 +2681,7 @@ public final class HBaseProtos { unknownFields.mergeVarintField(1, rawValue); } else { bitField0_ |= 0x00000001; - state_ = value; + state_ = rawValue; } break; } @@ -2624,7 +2691,7 @@ public final class HBaseProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + 
e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -2635,53 +2702,38 @@ public final class HBaseProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableState_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableState_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public TableState parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new TableState(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - /** - * Protobuf enum {@code hbase.pb.TableState.State} - * *
      * Table's current state
      * 
+ * + * Protobuf enum {@code hbase.pb.TableState.State} */ public enum State implements com.google.protobuf.ProtocolMessageEnum { /** * ENABLED = 0; */ - ENABLED(0, 0), + ENABLED(0), /** * DISABLED = 1; */ - DISABLED(1, 1), + DISABLED(1), /** * DISABLING = 2; */ - DISABLING(2, 2), + DISABLING(2), /** * ENABLING = 3; */ - ENABLING(3, 3), + ENABLING(3), ; /** @@ -2702,9 +2754,19 @@ public final class HBaseProtos { public static final int ENABLING_VALUE = 3; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static State valueOf(int value) { + return forNumber(value); + } + + public static State forNumber(int value) { switch (value) { case 0: return ENABLED; case 1: return DISABLED; @@ -2718,17 +2780,17 @@ public final class HBaseProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + State> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public State findValueByNumber(int number) { - return State.valueOf(number); + return State.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -2750,11 +2812,9 @@ public final class HBaseProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private State(int index, int value) { - this.index = index; + private State(int value) { this.value = value; } @@ -2762,37 +2822,35 @@ public final class HBaseProtos { } private int bitField0_; - // required .hbase.pb.TableState.State state = 1; public static final int STATE_FIELD_NUMBER = 1; - 
private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State state_; + private int state_; /** - * required .hbase.pb.TableState.State state = 1; - * *
      * This is the table's state.
      * 
+ * + * required .hbase.pb.TableState.State state = 1; */ public boolean hasState() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required .hbase.pb.TableState.State state = 1; - * *
      * This is the table's state.
      * 
+ * + * required .hbase.pb.TableState.State state = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State getState() { - return state_; + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State result = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State.valueOf(state_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State.ENABLED : result; } - private void initFields() { - state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State.ENABLED; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasState()) { memoizedIsInitialized = 0; @@ -2804,36 +2862,28 @@ public final class HBaseProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeEnum(1, state_.getNumber()); + output.writeEnum(1, state_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(1, state_.getNumber()); + .computeEnumSize(1, state_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } 
- - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -2846,15 +2896,12 @@ public final class HBaseProtos { boolean result = true; result = result && (hasState() == other.hasState()); if (hasState()) { - result = result && - (getState() == other.getState()); + result = result && state_ == other.state_; } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -2864,9 +2911,9 @@ public final class HBaseProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasState()) { hash = (37 * hash) + STATE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getState()); + hash = (53 * hash) + state_; } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -2894,65 +2941,77 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + 
.parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.TableState} - * *
      ** Denotes state of the table 
      * 
+ * + * Protobuf type {@code hbase.pb.TableState} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableStateOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.TableState) + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableStateOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableState_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableState_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -2965,29 +3024,22 @@ public final class HBaseProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); - state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State.ENABLED; + state_ = 0; bitField0_ = (bitField0_ & ~0x00000001); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableState_descriptor; @@ -3018,6 
+3070,32 @@ public final class HBaseProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState)other); @@ -3032,13 +3110,13 @@ public final class HBaseProtos { if (other.hasState()) { setState(other.getState()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasState()) { - return false; } return true; @@ -3053,7 +3131,7 @@ public final class HBaseProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -3063,74 +3141,111 @@ public final class HBaseProtos { } private int bitField0_; 
- // required .hbase.pb.TableState.State state = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State.ENABLED; + private int state_ = 0; /** - * required .hbase.pb.TableState.State state = 1; - * *
        * This is the table's state.
        * 
+ * + * required .hbase.pb.TableState.State state = 1; */ public boolean hasState() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required .hbase.pb.TableState.State state = 1; - * *
        * This is the table's state.
        * 
+ * + * required .hbase.pb.TableState.State state = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State getState() { - return state_; + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State result = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State.valueOf(state_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State.ENABLED : result; } /** - * required .hbase.pb.TableState.State state = 1; - * *
        * This is the table's state.
        * 
+ * + * required .hbase.pb.TableState.State state = 1; */ public Builder setState(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; - state_ = value; + state_ = value.getNumber(); onChanged(); return this; } /** - * required .hbase.pb.TableState.State state = 1; - * *
        * This is the table's state.
        * 
+ * + * required .hbase.pb.TableState.State state = 1; */ public Builder clearState() { bitField0_ = (bitField0_ & ~0x00000001); - state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State.ENABLED; + state_ = 0; onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.TableState) } + // @@protoc_insertion_point(class_scope:hbase.pb.TableState) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState DEFAULT_INSTANCE; static { - defaultInstance = new TableState(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState(); } - // @@protoc_insertion_point(class_scope:hbase.pb.TableState) - } - - public interface ColumnFamilySchemaOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState getDefaultInstance() { + return DEFAULT_INSTANCE; + } - // required bytes name = 1; - /** + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public TableState parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TableState(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface ColumnFamilySchemaOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ColumnFamilySchema) + com.google.protobuf.MessageOrBuilder { + + /** * required bytes name = 1; */ boolean hasName(); @@ -3139,7 +3254,6 @@ public final class HBaseProtos { */ com.google.protobuf.ByteString getName(); - // repeated .hbase.pb.BytesBytesPair attributes = 2; /** * repeated .hbase.pb.BytesBytesPair attributes = 2; */ @@ -3164,7 +3278,6 @@ public final class HBaseProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder( int index); - // repeated .hbase.pb.NameStringPair configuration = 3; /** * repeated .hbase.pb.NameStringPair configuration = 3; */ @@ -3190,44 +3303,38 @@ public final class HBaseProtos { int index); } /** - * Protobuf type {@code hbase.pb.ColumnFamilySchema} - * *
    **
    * Column Family Schema
    * Inspired by the rest ColumSchemaMessage
    * 
+ * + * Protobuf type {@code hbase.pb.ColumnFamilySchema} */ - public static final class ColumnFamilySchema extends - com.google.protobuf.GeneratedMessage - implements ColumnFamilySchemaOrBuilder { + public static final class ColumnFamilySchema extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ColumnFamilySchema) + ColumnFamilySchemaOrBuilder { // Use ColumnFamilySchema.newBuilder() to construct. - private ColumnFamilySchema(com.google.protobuf.GeneratedMessage.Builder builder) { + private ColumnFamilySchema(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ColumnFamilySchema(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ColumnFamilySchema defaultInstance; - public static ColumnFamilySchema getDefaultInstance() { - return defaultInstance; } - - public ColumnFamilySchema getDefaultInstanceForType() { - return defaultInstance; + private ColumnFamilySchema() { + name_ = com.google.protobuf.ByteString.EMPTY; + attributes_ = java.util.Collections.emptyList(); + configuration_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ColumnFamilySchema( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -3256,7 +3363,8 @@ public final class HBaseProtos { attributes_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } - 
attributes_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry)); + attributes_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry)); break; } case 26: { @@ -3264,7 +3372,8 @@ public final class HBaseProtos { configuration_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000004; } - configuration_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry)); + configuration_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry)); break; } } @@ -3273,7 +3382,7 @@ public final class HBaseProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { attributes_ = java.util.Collections.unmodifiableList(attributes_); @@ -3290,30 +3399,14 @@ public final class HBaseProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilySchema_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilySchema_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ColumnFamilySchema parsePartialFrom( - 
com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ColumnFamilySchema(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bytes name = 1; public static final int NAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString name_; /** @@ -3329,7 +3422,6 @@ public final class HBaseProtos { return name_; } - // repeated .hbase.pb.BytesBytesPair attributes = 2; public static final int ATTRIBUTES_FIELD_NUMBER = 2; private java.util.List attributes_; /** @@ -3365,7 +3457,6 @@ public final class HBaseProtos { return attributes_.get(index); } - // repeated .hbase.pb.NameStringPair configuration = 3; public static final int CONFIGURATION_FIELD_NUMBER = 3; private java.util.List configuration_; /** @@ -3401,15 +3492,11 @@ public final class HBaseProtos { return configuration_.get(index); } - private void initFields() { - name_ = com.google.protobuf.ByteString.EMPTY; - attributes_ = java.util.Collections.emptyList(); - configuration_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasName()) { memoizedIsInitialized = 0; @@ -3433,7 +3520,6 @@ public final class HBaseProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, name_); } @@ -3443,12 +3529,11 @@ public final class HBaseProtos { for (int i = 0; i < configuration_.size(); i++) { output.writeMessage(3, configuration_.get(i)); } - getUnknownFields().writeTo(output); + 
unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -3464,19 +3549,13 @@ public final class HBaseProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(3, configuration_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -3496,12 +3575,10 @@ public final class HBaseProtos { .equals(other.getAttributesList()); result = result && getConfigurationList() .equals(other.getConfigurationList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -3521,7 +3598,7 @@ public final class HBaseProtos { hash = (37 * hash) + CONFIGURATION_FIELD_NUMBER; hash = (53 * hash) + getConfigurationList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -3549,67 +3626,79 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return 
DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.ColumnFamilySchema} - * *
      **
      * Column Family Schema
      * Inspired by the rest ColumSchemaMessage
      * 
+ * + * Protobuf type {@code hbase.pb.ColumnFamilySchema} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ColumnFamilySchema) + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilySchema_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilySchema_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -3622,20 +3711,17 @@ public final class HBaseProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getAttributesFieldBuilder(); getConfigurationFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); name_ = com.google.protobuf.ByteString.EMPTY; @@ -3655,10 +3741,6 @@ public final class HBaseProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilySchema_descriptor; @@ -3707,6 +3789,32 @@ public final class HBaseProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema)other); @@ -3740,7 +3848,7 @@ public final class HBaseProtos { attributes_ = other.attributes_; bitField0_ = (bitField0_ & ~0x00000002); attributesBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getAttributesFieldBuilder() : null; } else { attributesBuilder_.addAllMessages(other.attributes_); @@ -3766,31 +3874,29 @@ public final class HBaseProtos { configuration_ = other.configuration_; bitField0_ = (bitField0_ & ~0x00000004); configurationBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getConfigurationFieldBuilder() : null; } else { configurationBuilder_.addAllMessages(other.configuration_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasName()) { - return false; } for (int i = 0; i < getAttributesCount(); i++) { if (!getAttributes(i).isInitialized()) { - return false; } } for (int i = 0; i < getConfigurationCount(); i++) { if (!getConfiguration(i).isInitialized()) { - return false; } } @@ -3806,7 +3912,7 @@ public final class HBaseProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -3816,7 +3922,6 @@ public final class HBaseProtos { } private int bitField0_; - // required bytes name = 1; private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes name = 1; @@ -3852,7 +3957,6 @@ public final class HBaseProtos { return this; } - // repeated .hbase.pb.BytesBytesPair attributes = 2; private java.util.List attributes_ = java.util.Collections.emptyList(); private void ensureAttributesIsMutable() { @@ -3862,7 +3966,7 @@ public final class HBaseProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> attributesBuilder_; /** @@ -3994,7 +4098,8 @@ public final class HBaseProtos { java.lang.Iterable values) { if (attributesBuilder_ == null) { 
ensureAttributesIsMutable(); - super.addAll(values, attributes_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, attributes_); onChanged(); } else { attributesBuilder_.addAllMessages(values); @@ -4077,11 +4182,11 @@ public final class HBaseProtos { getAttributesBuilderList() { return getAttributesFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getAttributesFieldBuilder() { if (attributesBuilder_ == null) { - attributesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + attributesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>( attributes_, ((bitField0_ & 0x00000002) == 0x00000002), @@ -4092,7 +4197,6 @@ public final class HBaseProtos { return attributesBuilder_; } - // repeated .hbase.pb.NameStringPair configuration = 3; private java.util.List configuration_ = java.util.Collections.emptyList(); private void ensureConfigurationIsMutable() { @@ -4102,7 +4206,7 @@ public final class HBaseProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> configurationBuilder_; /** @@ -4234,7 +4338,8 @@ public final class HBaseProtos { 
java.lang.Iterable values) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); - super.addAll(values, configuration_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, configuration_); onChanged(); } else { configurationBuilder_.addAllMessages(values); @@ -4317,11 +4422,11 @@ public final class HBaseProtos { getConfigurationBuilderList() { return getConfigurationFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationFieldBuilder() { if (configurationBuilder_ == null) { - configurationBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + configurationBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>( configuration_, ((bitField0_ & 0x00000004) == 0x00000004), @@ -4331,22 +4436,59 @@ public final class HBaseProtos { } return configurationBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ColumnFamilySchema) } + // @@protoc_insertion_point(class_scope:hbase.pb.ColumnFamilySchema) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema DEFAULT_INSTANCE; 
static { - defaultInstance = new ColumnFamilySchema(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ColumnFamilySchema parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ColumnFamilySchema(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ColumnFamilySchema) } - public interface RegionInfoOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RegionInfoOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RegionInfo) + com.google.protobuf.MessageOrBuilder { - // required uint64 region_id = 1; /** * required uint64 region_id = 1; */ @@ -4356,7 +4498,6 @@ public final class HBaseProtos { */ long getRegionId(); - // required .hbase.pb.TableName table_name = 2; /** * required .hbase.pb.TableName table_name = 2; */ @@ -4370,7 +4511,6 @@ public final class HBaseProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); - // optional bytes start_key = 3; /** * optional bytes start_key = 3; */ @@ -4380,7 +4520,6 @@ public final class HBaseProtos { */ 
com.google.protobuf.ByteString getStartKey(); - // optional bytes end_key = 4; /** * optional bytes end_key = 4; */ @@ -4390,7 +4529,6 @@ public final class HBaseProtos { */ com.google.protobuf.ByteString getEndKey(); - // optional bool offline = 5; /** * optional bool offline = 5; */ @@ -4400,7 +4538,6 @@ public final class HBaseProtos { */ boolean getOffline(); - // optional bool split = 6; /** * optional bool split = 6; */ @@ -4410,7 +4547,6 @@ public final class HBaseProtos { */ boolean getSplit(); - // optional int32 replica_id = 7 [default = 0]; /** * optional int32 replica_id = 7 [default = 0]; */ @@ -4421,43 +4557,40 @@ public final class HBaseProtos { int getReplicaId(); } /** - * Protobuf type {@code hbase.pb.RegionInfo} - * *
    **
    * Protocol buffer version of HRegionInfo.
    * 
+ * + * Protobuf type {@code hbase.pb.RegionInfo} */ - public static final class RegionInfo extends - com.google.protobuf.GeneratedMessage - implements RegionInfoOrBuilder { + public static final class RegionInfo extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RegionInfo) + RegionInfoOrBuilder { // Use RegionInfo.newBuilder() to construct. - private RegionInfo(com.google.protobuf.GeneratedMessage.Builder builder) { + private RegionInfo(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private RegionInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RegionInfo defaultInstance; - public static RegionInfo getDefaultInstance() { - return defaultInstance; } - - public RegionInfo getDefaultInstanceForType() { - return defaultInstance; + private RegionInfo() { + regionId_ = 0L; + startKey_ = com.google.protobuf.ByteString.EMPTY; + endKey_ = com.google.protobuf.ByteString.EMPTY; + offline_ = false; + split_ = false; + replicaId_ = 0; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RegionInfo( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -4525,7 +4658,7 @@ public final class HBaseProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } 
finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -4536,30 +4669,14 @@ public final class HBaseProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionInfo_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RegionInfo parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RegionInfo(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required uint64 region_id = 1; public static final int REGION_ID_FIELD_NUMBER = 1; private long regionId_; /** @@ -4575,7 +4692,6 @@ public final class HBaseProtos { return regionId_; } - // required .hbase.pb.TableName table_name = 2; public static final int TABLE_NAME_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** @@ -4588,16 +4704,15 @@ public final class HBaseProtos { * required .hbase.pb.TableName table_name = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { - return tableName_; + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * required .hbase.pb.TableName table_name = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } - // optional bytes start_key = 3; public static final int START_KEY_FIELD_NUMBER = 3; private com.google.protobuf.ByteString startKey_; /** @@ -4613,7 +4728,6 @@ public final class HBaseProtos { return startKey_; } - // optional bytes end_key = 4; public static final int END_KEY_FIELD_NUMBER = 4; private com.google.protobuf.ByteString endKey_; /** @@ -4629,7 +4743,6 @@ public final class HBaseProtos { return endKey_; } - // optional bool offline = 5; public static final int OFFLINE_FIELD_NUMBER = 5; private boolean offline_; /** @@ -4645,7 +4758,6 @@ public final class HBaseProtos { return offline_; } - // optional bool split = 6; public static final int SPLIT_FIELD_NUMBER = 6; private boolean split_; /** @@ -4661,7 +4773,6 @@ public final class HBaseProtos { return split_; } - // optional int32 replica_id = 7 [default = 0]; public static final int REPLICA_ID_FIELD_NUMBER = 7; private int replicaId_; /** @@ -4677,19 +4788,11 @@ public final class HBaseProtos { return replicaId_; } - private void initFields() { - regionId_ = 0L; - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - startKey_ = com.google.protobuf.ByteString.EMPTY; - endKey_ = com.google.protobuf.ByteString.EMPTY; - offline_ = false; - split_ = false; - replicaId_ = 0; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) 
return false; if (!hasRegionId()) { memoizedIsInitialized = 0; @@ -4709,12 +4812,11 @@ public final class HBaseProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, regionId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, tableName_); + output.writeMessage(2, getTableName()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBytes(3, startKey_); @@ -4731,12 +4833,11 @@ public final class HBaseProtos { if (((bitField0_ & 0x00000040) == 0x00000040)) { output.writeInt32(7, replicaId_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -4746,7 +4847,7 @@ public final class HBaseProtos { } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, tableName_); + .computeMessageSize(2, getTableName()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream @@ -4768,19 +4869,13 @@ public final class HBaseProtos { size += com.google.protobuf.CodedOutputStream .computeInt32Size(7, replicaId_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -4826,12 +4921,10 @@ public final class HBaseProtos { result = result && (getReplicaId() == other.getReplicaId()); } - result = result 
&& - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -4841,7 +4934,8 @@ public final class HBaseProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegionId()) { hash = (37 * hash) + REGION_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getRegionId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getRegionId()); } if (hasTableName()) { hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; @@ -4857,17 +4951,19 @@ public final class HBaseProtos { } if (hasOffline()) { hash = (37 * hash) + OFFLINE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getOffline()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getOffline()); } if (hasSplit()) { hash = (37 * hash) + SPLIT_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getSplit()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getSplit()); } if (hasReplicaId()) { hash = (37 * hash) + REPLICA_ID_FIELD_NUMBER; hash = (53 * hash) + getReplicaId(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -4895,66 +4991,78 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.RegionInfo} - * *
      **
      * Protocol buffer version of HRegionInfo.
      * 
+ * + * Protobuf type {@code hbase.pb.RegionInfo} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RegionInfo) + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionInfo_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -4967,25 +5075,22 @@ public final class HBaseProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTableNameFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); regionId_ = 0L; bitField0_ = (bitField0_ & ~0x00000001); if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; } else { tableNameBuilder_.clear(); } @@ -5003,10 +5108,6 @@ public final class HBaseProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionInfo_descriptor; @@ -5065,6 +5166,32 @@ public final class HBaseProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo)other); @@ -5097,21 +5224,19 @@ public final class HBaseProtos { if (other.hasReplicaId()) { setReplicaId(other.getReplicaId()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasRegionId()) { - return false; } if (!hasTableName()) { - return false; } if (!getTableName().isInitialized()) { - return false; } return true; @@ -5126,7 +5251,7 @@ public final class HBaseProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { 
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -5136,7 +5261,6 @@ public final class HBaseProtos { } private int bitField0_; - // required uint64 region_id = 1; private long regionId_ ; /** * required uint64 region_id = 1; @@ -5169,9 +5293,8 @@ public final class HBaseProtos { return this; } - // required .hbase.pb.TableName table_name = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * required .hbase.pb.TableName table_name = 2; @@ -5184,7 +5307,7 @@ public final class HBaseProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { - return tableName_; + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } @@ -5225,6 +5348,7 @@ public final class HBaseProtos { public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); @@ -5243,7 +5367,7 @@ public final class HBaseProtos { */ public Builder clearTableName() { if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); @@ -5266,19 +5390,20 @@ public final class HBaseProtos { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { - return tableName_; + return tableName_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * required .hbase.pb.TableName table_name = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { - tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( - tableName_, + getTableName(), getParentForChildren(), isClean()); tableName_ = null; @@ -5286,7 +5411,6 @@ public final class HBaseProtos { return tableNameBuilder_; } - // optional bytes start_key = 3; private com.google.protobuf.ByteString startKey_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes start_key = 3; @@ -5322,7 +5446,6 @@ public final class HBaseProtos { return this; } - // optional bytes end_key = 4; private com.google.protobuf.ByteString endKey_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes end_key = 4; @@ -5358,7 +5481,6 @@ public final class HBaseProtos { return this; } - // optional bool offline = 5; private boolean offline_ ; /** * optional bool offline = 5; @@ -5391,7 +5513,6 @@ public final class HBaseProtos { return this; } - // optional bool split = 6; private boolean split_ ; /** * optional bool split = 6; @@ -5424,7 +5545,6 @@ public final class HBaseProtos { return this; } - // optional int32 replica_id = 7 [default = 0]; private int replicaId_ ; /** * optional int32 replica_id = 7 [default 
= 0]; @@ -5456,22 +5576,59 @@ public final class HBaseProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RegionInfo) } + // @@protoc_insertion_point(class_scope:hbase.pb.RegionInfo) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo DEFAULT_INSTANCE; static { - defaultInstance = new RegionInfo(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RegionInfo parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionInfo(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.RegionInfo) } - public interface FavoredNodesOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface FavoredNodesOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.FavoredNodes) + 
com.google.protobuf.MessageOrBuilder { - // repeated .hbase.pb.ServerName favored_node = 1; /** * repeated .hbase.pb.ServerName favored_node = 1; */ @@ -5497,43 +5654,35 @@ public final class HBaseProtos { int index); } /** - * Protobuf type {@code hbase.pb.FavoredNodes} - * *
    **
    * Protocol buffer for favored nodes
    * 
+ * + * Protobuf type {@code hbase.pb.FavoredNodes} */ - public static final class FavoredNodes extends - com.google.protobuf.GeneratedMessage - implements FavoredNodesOrBuilder { + public static final class FavoredNodes extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.FavoredNodes) + FavoredNodesOrBuilder { // Use FavoredNodes.newBuilder() to construct. - private FavoredNodes(com.google.protobuf.GeneratedMessage.Builder builder) { + private FavoredNodes(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private FavoredNodes(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final FavoredNodes defaultInstance; - public static FavoredNodes getDefaultInstance() { - return defaultInstance; } - - public FavoredNodes getDefaultInstanceForType() { - return defaultInstance; + private FavoredNodes() { + favoredNode_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private FavoredNodes( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -5557,7 +5706,8 @@ public final class HBaseProtos { favoredNode_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } - favoredNode_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry)); + favoredNode_.add( + 
input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry)); break; } } @@ -5566,7 +5716,7 @@ public final class HBaseProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { favoredNode_ = java.util.Collections.unmodifiableList(favoredNode_); @@ -5580,29 +5730,13 @@ public final class HBaseProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_FavoredNodes_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_FavoredNodes_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public FavoredNodes parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new FavoredNodes(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - // repeated .hbase.pb.ServerName favored_node = 1; public static final int FAVORED_NODE_FIELD_NUMBER = 1; private java.util.List favoredNode_; /** @@ -5638,13 +5772,11 @@ public final class HBaseProtos { return favoredNode_.get(index); } - private void initFields() { - favoredNode_ = 
java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; for (int i = 0; i < getFavoredNodeCount(); i++) { if (!getFavoredNode(i).isInitialized()) { @@ -5658,16 +5790,14 @@ public final class HBaseProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < favoredNode_.size(); i++) { output.writeMessage(1, favoredNode_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -5675,19 +5805,13 @@ public final class HBaseProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, favoredNode_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -5700,12 +5824,10 @@ public final class HBaseProtos { boolean result = true; result = result && getFavoredNodeList() .equals(other.getFavoredNodeList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -5717,7 +5839,7 @@ public final class HBaseProtos 
{ hash = (37 * hash) + FAVORED_NODE_FIELD_NUMBER; hash = (53 * hash) + getFavoredNodeList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -5745,66 +5867,78 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.FavoredNodes} - * *
      **
      * Protocol buffer for favored nodes
      * 
+ * + * Protobuf type {@code hbase.pb.FavoredNodes} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodesOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.FavoredNodes) + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodesOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_FavoredNodes_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_FavoredNodes_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -5817,19 +5951,16 @@ public final class HBaseProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getFavoredNodeFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (favoredNodeBuilder_ == null) { @@ -5841,10 +5972,6 @@ public final class HBaseProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_FavoredNodes_descriptor; @@ -5878,6 +6005,32 @@ public final 
class HBaseProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes)other); @@ -5908,21 +6061,21 @@ public final class HBaseProtos { favoredNode_ = other.favoredNode_; bitField0_ = (bitField0_ & ~0x00000001); favoredNodeBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getFavoredNodeFieldBuilder() : null; } else { favoredNodeBuilder_.addAllMessages(other.favoredNode_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getFavoredNodeCount(); i++) { if (!getFavoredNode(i).isInitialized()) { - return false; } } @@ -5938,7 +6091,7 @@ public final class HBaseProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -5948,7 +6101,6 @@ public final class HBaseProtos { } private int bitField0_; - // repeated .hbase.pb.ServerName favored_node = 1; private java.util.List favoredNode_ = java.util.Collections.emptyList(); private void ensureFavoredNodeIsMutable() { @@ -5958,7 +6110,7 @@ public final class HBaseProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> favoredNodeBuilder_; /** @@ -6090,7 +6242,8 @@ public final class HBaseProtos { java.lang.Iterable values) { if (favoredNodeBuilder_ == null) { ensureFavoredNodeIsMutable(); - super.addAll(values, favoredNode_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, favoredNode_); onChanged(); } else { favoredNodeBuilder_.addAllMessages(values); @@ -6173,11 +6326,11 @@ public final class HBaseProtos { getFavoredNodeBuilderList() { return getFavoredNodeFieldBuilder().getBuilderList(); } - private 
com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getFavoredNodeFieldBuilder() { if (favoredNodeBuilder_ == null) { - favoredNodeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + favoredNodeBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( favoredNode_, ((bitField0_ & 0x00000001) == 0x00000001), @@ -6187,22 +6340,59 @@ public final class HBaseProtos { } return favoredNodeBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.FavoredNodes) } + // @@protoc_insertion_point(class_scope:hbase.pb.FavoredNodes) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes DEFAULT_INSTANCE; static { - defaultInstance = new FavoredNodes(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public FavoredNodes parsePartialFrom( 
+ com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FavoredNodes(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.FavoredNodes) } - public interface RegionSpecifierOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RegionSpecifierOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RegionSpecifier) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionSpecifier.RegionSpecifierType type = 1; /** * required .hbase.pb.RegionSpecifier.RegionSpecifierType type = 1; */ @@ -6212,7 +6402,6 @@ public final class HBaseProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType getType(); - // required bytes value = 2; /** * required bytes value = 2; */ @@ -6223,8 +6412,6 @@ public final class HBaseProtos { com.google.protobuf.ByteString getValue(); } /** - * Protobuf type {@code hbase.pb.RegionSpecifier} - * *
    **
    * Container protocol buffer to specify a region.
@@ -6232,37 +6419,32 @@ public final class HBaseProtos {
    * of the region name, which is known as encoded
    * region name.
    * 
+ * + * Protobuf type {@code hbase.pb.RegionSpecifier} */ - public static final class RegionSpecifier extends - com.google.protobuf.GeneratedMessage - implements RegionSpecifierOrBuilder { + public static final class RegionSpecifier extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RegionSpecifier) + RegionSpecifierOrBuilder { // Use RegionSpecifier.newBuilder() to construct. - private RegionSpecifier(com.google.protobuf.GeneratedMessage.Builder builder) { + private RegionSpecifier(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private RegionSpecifier(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RegionSpecifier defaultInstance; - public static RegionSpecifier getDefaultInstance() { - return defaultInstance; - } - - public RegionSpecifier getDefaultInstanceForType() { - return defaultInstance; + private RegionSpecifier() { + type_ = 1; + value_ = com.google.protobuf.ByteString.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RegionSpecifier( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -6288,7 +6470,7 @@ public final class HBaseProtos { unknownFields.mergeVarintField(1, rawValue); } else { bitField0_ |= 0x00000001; - type_ = value; + type_ = rawValue; } break; } @@ -6303,7 +6485,7 @@ public final class HBaseProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { 
throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -6314,72 +6496,67 @@ public final class HBaseProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionSpecifier_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionSpecifier_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RegionSpecifier parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RegionSpecifier(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - /** * Protobuf enum {@code hbase.pb.RegionSpecifier.RegionSpecifierType} */ public enum RegionSpecifierType implements com.google.protobuf.ProtocolMessageEnum { /** - * REGION_NAME = 1; - * *
        * <tablename>,<startkey>,<regionId>.<encodedName>
        * 
+ * + * REGION_NAME = 1; */ - REGION_NAME(0, 1), + REGION_NAME(1), /** - * ENCODED_REGION_NAME = 2; - * *
        * hash of <tablename>,<startkey>,<regionId>
        * 
+ * + * ENCODED_REGION_NAME = 2; */ - ENCODED_REGION_NAME(1, 2), + ENCODED_REGION_NAME(2), ; /** - * REGION_NAME = 1; - * *
        * <tablename>,<startkey>,<regionId>.<encodedName>
        * 
+ * + * REGION_NAME = 1; */ public static final int REGION_NAME_VALUE = 1; /** - * ENCODED_REGION_NAME = 2; - * *
        * hash of <tablename>,<startkey>,<regionId>
        * 
+ * + * ENCODED_REGION_NAME = 2; */ public static final int ENCODED_REGION_NAME_VALUE = 2; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static RegionSpecifierType valueOf(int value) { + return forNumber(value); + } + + public static RegionSpecifierType forNumber(int value) { switch (value) { case 1: return REGION_NAME; case 2: return ENCODED_REGION_NAME; @@ -6391,17 +6568,17 @@ public final class HBaseProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + RegionSpecifierType> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public RegionSpecifierType findValueByNumber(int number) { - return RegionSpecifierType.valueOf(number); + return RegionSpecifierType.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -6423,11 +6600,9 @@ public final class HBaseProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private RegionSpecifierType(int index, int value) { - this.index = index; + private RegionSpecifierType(int value) { this.value = value; } @@ -6435,9 +6610,8 @@ public final class HBaseProtos { } private int bitField0_; - // required .hbase.pb.RegionSpecifier.RegionSpecifierType type = 1; public static final int TYPE_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType type_; + private int type_; /** * required .hbase.pb.RegionSpecifier.RegionSpecifierType type = 1; */ @@ -6448,10 +6622,10 
@@ public final class HBaseProtos { * required .hbase.pb.RegionSpecifier.RegionSpecifierType type = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType getType() { - return type_; + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType result = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.valueOf(type_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME : result; } - // required bytes value = 2; public static final int VALUE_FIELD_NUMBER = 2; private com.google.protobuf.ByteString value_; /** @@ -6467,14 +6641,11 @@ public final class HBaseProtos { return value_; } - private void initFields() { - type_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME; - value_ = com.google.protobuf.ByteString.EMPTY; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasType()) { memoizedIsInitialized = 0; @@ -6490,43 +6661,35 @@ public final class HBaseProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeEnum(1, type_.getNumber()); + output.writeEnum(1, type_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, value_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { 
size += com.google.protobuf.CodedOutputStream - .computeEnumSize(1, type_.getNumber()); + .computeEnumSize(1, type_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, value_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -6539,20 +6702,17 @@ public final class HBaseProtos { boolean result = true; result = result && (hasType() == other.hasType()); if (hasType()) { - result = result && - (getType() == other.getType()); + result = result && type_ == other.type_; } result = result && (hasValue() == other.hasValue()); if (hasValue()) { result = result && getValue() .equals(other.getValue()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -6562,13 +6722,13 @@ public final class HBaseProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasType()) { hash = (37 * hash) + TYPE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getType()); + hash = (53 * hash) + type_; } if (hasValue()) { hash = (37 * hash) + VALUE_FIELD_NUMBER; hash = (53 * hash) + getValue().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -6596,52 +6756,61 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier 
parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public 
static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.RegionSpecifier} - * *
      **
      * Container protocol buffer to specify a region.
@@ -6649,16 +6818,19 @@ public final class HBaseProtos {
      * of the region name, which is known as encoded
      * region name.
      * 
+ * + * Protobuf type {@code hbase.pb.RegionSpecifier} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RegionSpecifier) + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionSpecifier_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionSpecifier_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -6671,31 +6843,24 @@ public final class HBaseProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); - type_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME; + type_ = 1; bitField0_ = (bitField0_ & ~0x00000001); value_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor 
getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionSpecifier_descriptor; @@ -6730,6 +6895,32 @@ public final class HBaseProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier)other); @@ -6747,17 +6938,16 @@ public final class HBaseProtos { if (other.hasValue()) { setValue(other.getValue()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasType()) { - return false; } if (!hasValue()) { - return false; } return true; @@ -6772,7 +6962,7 @@ public final class HBaseProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier) 
e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -6782,8 +6972,7 @@ public final class HBaseProtos { } private int bitField0_; - // required .hbase.pb.RegionSpecifier.RegionSpecifierType type = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType type_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME; + private int type_ = 1; /** * required .hbase.pb.RegionSpecifier.RegionSpecifierType type = 1; */ @@ -6794,7 +6983,8 @@ public final class HBaseProtos { * required .hbase.pb.RegionSpecifier.RegionSpecifierType type = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType getType() { - return type_; + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType result = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.valueOf(type_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME : result; } /** * required .hbase.pb.RegionSpecifier.RegionSpecifierType type = 1; @@ -6804,7 +6994,7 @@ public final class HBaseProtos { throw new NullPointerException(); } bitField0_ |= 0x00000001; - type_ = value; + type_ = value.getNumber(); onChanged(); return this; } @@ -6813,12 +7003,11 @@ public final class HBaseProtos { */ public Builder clearType() { bitField0_ = (bitField0_ & ~0x00000001); - type_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME; + type_ = 1; onChanged(); return this; } - // required bytes value = 2; private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes value = 2; @@ -6853,22 +7042,59 @@ public final class HBaseProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RegionSpecifier) } + // @@protoc_insertion_point(class_scope:hbase.pb.RegionSpecifier) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier DEFAULT_INSTANCE; static { - defaultInstance = new RegionSpecifier(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RegionSpecifier 
parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionSpecifier(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.RegionSpecifier) } - public interface TimeRangeOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface TimeRangeOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.TimeRange) + com.google.protobuf.MessageOrBuilder { - // optional uint64 from = 1; /** * optional uint64 from = 1; */ @@ -6878,7 +7104,6 @@ public final class HBaseProtos { */ long getFrom(); - // optional uint64 to = 2; /** * optional uint64 to = 2; */ @@ -6889,8 +7114,6 @@ public final class HBaseProtos { long getTo(); } /** - * Protobuf type {@code hbase.pb.TimeRange} - * *
    **
    * A range of time. Both from and to are Java time
@@ -6898,37 +7121,32 @@ public final class HBaseProtos {
    * range, it means all time.  By default, if not
    * specified, from = 0, and to = Long.MAX_VALUE
    * 
+ * + * Protobuf type {@code hbase.pb.TimeRange} */ - public static final class TimeRange extends - com.google.protobuf.GeneratedMessage - implements TimeRangeOrBuilder { + public static final class TimeRange extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.TimeRange) + TimeRangeOrBuilder { // Use TimeRange.newBuilder() to construct. - private TimeRange(com.google.protobuf.GeneratedMessage.Builder builder) { + private TimeRange(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private TimeRange(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final TimeRange defaultInstance; - public static TimeRange getDefaultInstance() { - return defaultInstance; } - - public TimeRange getDefaultInstanceForType() { - return defaultInstance; + private TimeRange() { + from_ = 0L; + to_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private TimeRange( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -6963,7 +7181,7 @@ public final class HBaseProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -6974,30 +7192,14 @@ public final class HBaseProtos { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TimeRange_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TimeRange_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public TimeRange parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new TimeRange(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional uint64 from = 1; public static final int FROM_FIELD_NUMBER = 1; private long from_; /** @@ -7013,7 +7215,6 @@ public final class HBaseProtos { return from_; } - // optional uint64 to = 2; public static final int TO_FIELD_NUMBER = 2; private long to_; /** @@ -7029,14 +7230,11 @@ public final class HBaseProtos { return to_; } - private void initFields() { - from_ = 0L; - to_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -7044,19 +7242,17 @@ public final class HBaseProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 
0x00000001) == 0x00000001)) { output.writeUInt64(1, from_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt64(2, to_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -7068,19 +7264,13 @@ public final class HBaseProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(2, to_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -7101,12 +7291,10 @@ public final class HBaseProtos { result = result && (getTo() == other.getTo()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -7116,13 +7304,15 @@ public final class HBaseProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasFrom()) { hash = (37 * hash) + FROM_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getFrom()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getFrom()); } if (hasTo()) { hash = (37 * hash) + TO_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getTo()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getTo()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -7150,52 +7340,61 @@ public 
final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.TimeRange} - * *
      **
      * A range of time. Both from and to are Java time
@@ -7203,16 +7402,19 @@ public final class HBaseProtos {
      * range, it means all time.  By default, if not
      * specified, from = 0, and to = Long.MAX_VALUE
      * 
+ * + * Protobuf type {@code hbase.pb.TimeRange} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.TimeRange) + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TimeRange_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TimeRange_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -7225,18 +7427,15 @@ public final class HBaseProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); from_ = 0L; @@ -7246,10 +7445,6 @@ public final class HBaseProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TimeRange_descriptor; @@ -7284,6 +7479,32 @@ public final class HBaseProtos { return result; } + public Builder clone() { + return 
(Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange)other); @@ -7301,7 +7522,8 @@ public final class HBaseProtos { if (other.hasTo()) { setTo(other.getTo()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -7318,7 +7540,7 @@ public final class HBaseProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -7328,7 +7550,6 @@ public final class HBaseProtos { } private int bitField0_; - // optional uint64 from = 1; private long from_ ; /** * optional uint64 from = 1; @@ -7361,7 +7582,6 @@ public final class HBaseProtos { return this; } - // optional uint64 to = 2; private long to_ ; 
/** * optional uint64 to = 2; @@ -7393,22 +7613,59 @@ public final class HBaseProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.TimeRange) } + // @@protoc_insertion_point(class_scope:hbase.pb.TimeRange) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange DEFAULT_INSTANCE; static { - defaultInstance = new TimeRange(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public TimeRange parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TimeRange(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.TimeRange) } - public interface ColumnFamilyTimeRangeOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ColumnFamilyTimeRangeOrBuilder extends + // 
@@protoc_insertion_point(interface_extends:hbase.pb.ColumnFamilyTimeRange) + com.google.protobuf.MessageOrBuilder { - // required bytes column_family = 1; /** * required bytes column_family = 1; */ @@ -7418,7 +7675,6 @@ public final class HBaseProtos { */ com.google.protobuf.ByteString getColumnFamily(); - // required .hbase.pb.TimeRange time_range = 2; /** * required .hbase.pb.TimeRange time_range = 2; */ @@ -7433,42 +7689,34 @@ public final class HBaseProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder(); } /** - * Protobuf type {@code hbase.pb.ColumnFamilyTimeRange} - * *
    * ColumnFamily Specific TimeRange 
    * 
+ * + * Protobuf type {@code hbase.pb.ColumnFamilyTimeRange} */ - public static final class ColumnFamilyTimeRange extends - com.google.protobuf.GeneratedMessage - implements ColumnFamilyTimeRangeOrBuilder { + public static final class ColumnFamilyTimeRange extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ColumnFamilyTimeRange) + ColumnFamilyTimeRangeOrBuilder { // Use ColumnFamilyTimeRange.newBuilder() to construct. - private ColumnFamilyTimeRange(com.google.protobuf.GeneratedMessage.Builder builder) { + private ColumnFamilyTimeRange(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ColumnFamilyTimeRange(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ColumnFamilyTimeRange defaultInstance; - public static ColumnFamilyTimeRange getDefaultInstance() { - return defaultInstance; } - - public ColumnFamilyTimeRange getDefaultInstanceForType() { - return defaultInstance; + private ColumnFamilyTimeRange() { + columnFamily_ = com.google.protobuf.ByteString.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ColumnFamilyTimeRange( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -7511,7 +7759,7 @@ public final class HBaseProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - 
e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -7522,30 +7770,14 @@ public final class HBaseProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilyTimeRange_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilyTimeRange_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ColumnFamilyTimeRange parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ColumnFamilyTimeRange(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bytes column_family = 1; public static final int COLUMN_FAMILY_FIELD_NUMBER = 1; private com.google.protobuf.ByteString columnFamily_; /** @@ -7561,7 +7793,6 @@ public final class HBaseProtos { return columnFamily_; } - // required .hbase.pb.TimeRange time_range = 2; public static final int TIME_RANGE_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange timeRange_; /** @@ -7574,23 +7805,20 @@ public final class HBaseProtos { * required .hbase.pb.TimeRange time_range = 2; */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { - return timeRange_; + return timeRange_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } /** * required .hbase.pb.TimeRange time_range = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { - return timeRange_; + return timeRange_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } - private void initFields() { - columnFamily_ = com.google.protobuf.ByteString.EMPTY; - timeRange_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasColumnFamily()) { memoizedIsInitialized = 0; @@ -7606,19 +7834,17 @@ public final class HBaseProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, columnFamily_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, timeRange_); + output.writeMessage(2, getTimeRange()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -7628,21 +7854,15 @@ public final class HBaseProtos { } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, timeRange_); + .computeMessageSize(2, getTimeRange()); } - size += 
getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -7663,12 +7883,10 @@ public final class HBaseProtos { result = result && getTimeRange() .equals(other.getTimeRange()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -7684,7 +7902,7 @@ public final class HBaseProtos { hash = (37 * hash) + TIME_RANGE_FIELD_NUMBER; hash = (53 * hash) + getTimeRange().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -7712,65 +7930,77 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange 
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.ColumnFamilyTimeRange} - * *
      * ColumnFamily Specific TimeRange 
      * 
+ * + * Protobuf type {@code hbase.pb.ColumnFamilyTimeRange} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ColumnFamilyTimeRange) + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilyTimeRange_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilyTimeRange_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -7783,25 +8013,22 @@ public final class HBaseProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTimeRangeFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); columnFamily_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); if (timeRangeBuilder_ == null) { - timeRange_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + timeRange_ = null; } else { timeRangeBuilder_.clear(); } @@ -7809,10 +8036,6 @@ public final class HBaseProtos { return 
this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilyTimeRange_descriptor; @@ -7851,6 +8074,32 @@ public final class HBaseProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange)other); @@ -7868,17 +8117,16 @@ public final class HBaseProtos { if (other.hasTimeRange()) { mergeTimeRange(other.getTimeRange()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasColumnFamily()) { - return false; } if (!hasTimeRange()) { - return false; } return true; @@ -7893,7 +8141,7 @@ public final class HBaseProtos { parsedMessage = PARSER.parsePartialFrom(input, 
extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -7903,7 +8151,6 @@ public final class HBaseProtos { } private int bitField0_; - // required bytes column_family = 1; private com.google.protobuf.ByteString columnFamily_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes column_family = 1; @@ -7939,9 +8186,8 @@ public final class HBaseProtos { return this; } - // required .hbase.pb.TimeRange time_range = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange timeRange_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; /** * required .hbase.pb.TimeRange time_range = 2; @@ -7954,7 +8200,7 @@ public final class HBaseProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { if (timeRangeBuilder_ == null) { - return timeRange_; + return timeRange_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } else { return timeRangeBuilder_.getMessage(); } @@ -7995,6 +8241,7 @@ public final class HBaseProtos { public Builder mergeTimeRange(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange value) { if (timeRangeBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + timeRange_ != null && timeRange_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) { timeRange_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial(); @@ -8013,7 +8260,7 @@ public final class HBaseProtos { */ public Builder clearTimeRange() { if (timeRangeBuilder_ == null) { - timeRange_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + timeRange_ = null; onChanged(); } else { timeRangeBuilder_.clear(); @@ -8036,41 +8283,79 @@ public final class HBaseProtos { if (timeRangeBuilder_ != null) { return timeRangeBuilder_.getMessageOrBuilder(); } else { - return timeRange_; + return timeRange_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } } /** * required .hbase.pb.TimeRange time_range = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> getTimeRangeFieldBuilder() { if (timeRangeBuilder_ == null) { - timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder< + timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>( - timeRange_, + getTimeRange(), getParentForChildren(), isClean()); timeRange_ = null; } return timeRangeBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ColumnFamilyTimeRange) } + // @@protoc_insertion_point(class_scope:hbase.pb.ColumnFamilyTimeRange) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange DEFAULT_INSTANCE; static { - defaultInstance = new ColumnFamilyTimeRange(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getDefaultInstance() { + return 
DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ColumnFamilyTimeRange parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ColumnFamilyTimeRange(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ColumnFamilyTimeRange) } - public interface ServerNameOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ServerNameOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ServerName) + com.google.protobuf.MessageOrBuilder { - // required string host_name = 1; /** * required string host_name = 1; */ @@ -8085,7 +8370,6 @@ public final class HBaseProtos { com.google.protobuf.ByteString getHostNameBytes(); - // optional uint32 port = 2; /** * optional uint32 port = 2; */ @@ -8095,7 +8379,6 @@ public final class HBaseProtos { */ int getPort(); - // optional uint64 start_code = 3; /** * optional uint64 start_code = 3; */ @@ -8106,43 +8389,37 @@ public final class HBaseProtos { long getStartCode(); } /** - * Protobuf type {@code hbase.pb.ServerName} - * *
    **
    * Protocol buffer version of ServerName
    * 
+ * + * Protobuf type {@code hbase.pb.ServerName} */ - public static final class ServerName extends - com.google.protobuf.GeneratedMessage - implements ServerNameOrBuilder { + public static final class ServerName extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ServerName) + ServerNameOrBuilder { // Use ServerName.newBuilder() to construct. - private ServerName(com.google.protobuf.GeneratedMessage.Builder builder) { + private ServerName(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private ServerName(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ServerName defaultInstance; - public static ServerName getDefaultInstance() { - return defaultInstance; - } - - public ServerName getDefaultInstanceForType() { - return defaultInstance; + private ServerName() { + hostName_ = ""; + port_ = 0; + startCode_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ServerName( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -8162,8 +8439,9 @@ public final class HBaseProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - hostName_ = input.readBytes(); + hostName_ = bs; break; } case 16: { @@ -8182,7 +8460,7 @@ public final class HBaseProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -8193,32 +8471,16 @@ public final class HBaseProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ServerName_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ServerName_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ServerName parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ServerName(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string host_name = 1; public static final int HOST_NAME_FIELD_NUMBER = 1; - private java.lang.Object hostName_; + private volatile java.lang.Object hostName_; /** * required string host_name = 1; */ @@ -8259,7 +8521,6 @@ public final class HBaseProtos { } } - // optional uint32 port = 2; public static final int PORT_FIELD_NUMBER = 2; private int port_; /** @@ -8275,7 +8536,6 @@ public final class HBaseProtos { return port_; } - // optional uint64 start_code = 3; public static final int START_CODE_FIELD_NUMBER = 3; private long startCode_; /** @@ -8291,15 +8551,11 @@ public final class 
HBaseProtos { return startCode_; } - private void initFields() { - hostName_ = ""; - port_ = 0; - startCode_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasHostName()) { memoizedIsInitialized = 0; @@ -8311,9 +8567,8 @@ public final class HBaseProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getHostNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, hostName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt32(2, port_); @@ -8321,18 +8576,16 @@ public final class HBaseProtos { if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeUInt64(3, startCode_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getHostNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, hostName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream @@ -8342,19 +8595,13 @@ public final class HBaseProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(3, startCode_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws 
java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -8380,12 +8627,10 @@ public final class HBaseProtos { result = result && (getStartCode() == other.getStartCode()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -8403,9 +8648,10 @@ public final class HBaseProtos { } if (hasStartCode()) { hash = (37 * hash) + START_CODE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getStartCode()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getStartCode()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -8433,66 +8679,78 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.ServerName} - * *
      **
      * Protocol buffer version of ServerName
      * 
+ * + * Protobuf type {@code hbase.pb.ServerName} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ServerName) + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ServerName_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ServerName_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -8505,18 +8763,15 @@ public final class HBaseProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); hostName_ = ""; @@ -8528,10 +8783,6 @@ public final class HBaseProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ServerName_descriptor; @@ -8570,6 +8821,32 @@ public final class HBaseProtos { return result; } + public Builder clone() { 
+ return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName)other); @@ -8592,13 +8869,13 @@ public final class HBaseProtos { if (other.hasStartCode()) { setStartCode(other.getStartCode()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasHostName()) { - return false; } return true; @@ -8613,7 +8890,7 @@ public final class HBaseProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -8623,7 +8900,6 @@ public final class HBaseProtos { } private int bitField0_; - // required string host_name = 1; private java.lang.Object hostName_ = ""; 
/** * required string host_name = 1; @@ -8637,9 +8913,12 @@ public final class HBaseProtos { public java.lang.String getHostName() { java.lang.Object ref = hostName_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - hostName_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + hostName_ = s; + } return s; } else { return (java.lang.String) ref; @@ -8697,7 +8976,6 @@ public final class HBaseProtos { return this; } - // optional uint32 port = 2; private int port_ ; /** * optional uint32 port = 2; @@ -8730,7 +9008,6 @@ public final class HBaseProtos { return this; } - // optional uint64 start_code = 3; private long startCode_ ; /** * optional uint64 start_code = 3; @@ -8762,22 +9039,59 @@ public final class HBaseProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ServerName) } + // @@protoc_insertion_point(class_scope:hbase.pb.ServerName) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName DEFAULT_INSTANCE; static { - defaultInstance = new ServerName(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ServerName parsePartialFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ServerName(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ServerName) } - public interface CoprocessorOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface CoprocessorOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.Coprocessor) + com.google.protobuf.MessageOrBuilder { - // required string name = 1; /** * required string name = 1; */ @@ -8795,36 +9109,28 @@ public final class HBaseProtos { /** * Protobuf type {@code hbase.pb.Coprocessor} */ - public static final class Coprocessor extends - com.google.protobuf.GeneratedMessage - implements CoprocessorOrBuilder { + public static final class Coprocessor extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.Coprocessor) + CoprocessorOrBuilder { // Use Coprocessor.newBuilder() to construct. 
- private Coprocessor(com.google.protobuf.GeneratedMessage.Builder builder) { + private Coprocessor(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private Coprocessor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final Coprocessor defaultInstance; - public static Coprocessor getDefaultInstance() { - return defaultInstance; - } - - public Coprocessor getDefaultInstanceForType() { - return defaultInstance; + private Coprocessor() { + name_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private Coprocessor( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -8844,8 +9150,9 @@ public final class HBaseProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - name_ = input.readBytes(); + name_ = bs; break; } } @@ -8854,7 +9161,7 @@ public final class HBaseProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -8865,32 +9172,16 @@ public final class HBaseProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_Coprocessor_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_Coprocessor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public Coprocessor parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new Coprocessor(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string name = 1; public static final int NAME_FIELD_NUMBER = 1; - private java.lang.Object name_; + private volatile java.lang.Object name_; /** * required string name = 1; */ @@ -8931,13 +9222,11 @@ public final class HBaseProtos { } } - private void initFields() { - name_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasName()) { memoizedIsInitialized = 0; @@ -8949,36 +9238,27 @@ public final class HBaseProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int 
getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -8994,12 +9274,10 @@ public final class HBaseProtos { result = result && getName() .equals(other.getName()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -9011,7 +9289,7 @@ public final class HBaseProtos { hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -9039,46 +9317,57 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor prototype) { - return newBuilder().mergeFrom(prototype); + return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -9086,14 +9375,15 @@ public final class HBaseProtos { * Protobuf type {@code hbase.pb.Coprocessor} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CoprocessorOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.Coprocessor) + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CoprocessorOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_Coprocessor_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_Coprocessor_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -9106,18 +9396,15 @@ public final class HBaseProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder 
create() { - return new Builder(); - } - public Builder clear() { super.clear(); name_ = ""; @@ -9125,10 +9412,6 @@ public final class HBaseProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_Coprocessor_descriptor; @@ -9159,6 +9442,32 @@ public final class HBaseProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor)other); @@ -9175,13 +9484,13 @@ public final class HBaseProtos { name_ = other.name_; onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasName()) { - return false; } return true; @@ -9196,7 +9505,7 @@ public final class HBaseProtos { 
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -9206,7 +9515,6 @@ public final class HBaseProtos { } private int bitField0_; - // required string name = 1; private java.lang.Object name_ = ""; /** * required string name = 1; @@ -9220,9 +9528,12 @@ public final class HBaseProtos { public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - name_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + name_ = s; + } return s; } else { return (java.lang.String) ref; @@ -9279,22 +9590,59 @@ public final class HBaseProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.Coprocessor) } + // @@protoc_insertion_point(class_scope:hbase.pb.Coprocessor) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor DEFAULT_INSTANCE; static { - defaultInstance = new Coprocessor(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + 
@java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public Coprocessor parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Coprocessor(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.Coprocessor) } - public interface NameStringPairOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface NameStringPairOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.NameStringPair) + com.google.protobuf.MessageOrBuilder { - // required string name = 1; /** * required string name = 1; */ @@ -9309,7 +9657,6 @@ public final class HBaseProtos { com.google.protobuf.ByteString getNameBytes(); - // required string value = 2; /** * required string value = 2; */ @@ -9327,36 +9674,29 @@ public final class HBaseProtos { /** * Protobuf type {@code hbase.pb.NameStringPair} */ - public static final class NameStringPair extends - com.google.protobuf.GeneratedMessage - implements NameStringPairOrBuilder { + public static final class NameStringPair extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.NameStringPair) + NameStringPairOrBuilder { // Use NameStringPair.newBuilder() to construct. 
- private NameStringPair(com.google.protobuf.GeneratedMessage.Builder builder) { + private NameStringPair(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private NameStringPair(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final NameStringPair defaultInstance; - public static NameStringPair getDefaultInstance() { - return defaultInstance; } - - public NameStringPair getDefaultInstanceForType() { - return defaultInstance; + private NameStringPair() { + name_ = ""; + value_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private NameStringPair( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -9376,13 +9716,15 @@ public final class HBaseProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - name_ = input.readBytes(); + name_ = bs; break; } case 18: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; - value_ = input.readBytes(); + value_ = bs; break; } } @@ -9391,7 +9733,7 @@ public final class HBaseProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -9402,32 +9744,16 @@ public final class HBaseProtos { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameStringPair_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameStringPair_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public NameStringPair parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new NameStringPair(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string name = 1; public static final int NAME_FIELD_NUMBER = 1; - private java.lang.Object name_; + private volatile java.lang.Object name_; /** * required string name = 1; */ @@ -9468,9 +9794,8 @@ public final class HBaseProtos { } } - // required string value = 2; public static final int VALUE_FIELD_NUMBER = 2; - private java.lang.Object value_; + private volatile java.lang.Object value_; /** * required string value = 2; */ @@ -9511,14 +9836,11 @@ public final class HBaseProtos { } } - private void initFields() { - name_ = ""; - value_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasName()) { memoizedIsInitialized = 
0; @@ -9534,43 +9856,33 @@ public final class HBaseProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getValueBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, value_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getValueBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, value_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -9591,12 +9903,10 @@ public final class HBaseProtos { result = result && getValue() .equals(other.getValue()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int 
hashCode() { if (memoizedHashCode != 0) { @@ -9612,7 +9922,7 @@ public final class HBaseProtos { hash = (37 * hash) + VALUE_FIELD_NUMBER; hash = (53 * hash) + getValue().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -9640,46 +9950,57 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -9687,14 +10008,15 @@ public final class HBaseProtos { * Protobuf type {@code hbase.pb.NameStringPair} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.NameStringPair) + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameStringPair_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameStringPair_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -9707,18 +10029,15 @@ public final class HBaseProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); name_ = ""; @@ -9728,10 +10047,6 @@ public final class HBaseProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameStringPair_descriptor; @@ -9766,6 +10081,32 @@ public final class HBaseProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + 
com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair)other); @@ -9787,17 +10128,16 @@ public final class HBaseProtos { value_ = other.value_; onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasName()) { - return false; } if (!hasValue()) { - return false; } return true; @@ -9812,7 +10152,7 @@ public final class HBaseProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -9822,7 +10162,6 @@ public final class HBaseProtos { } private int bitField0_; - // required string name = 1; private java.lang.Object name_ = ""; /** * required string name = 1; @@ -9836,9 +10175,12 @@ public final class HBaseProtos { public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - name_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + name_ = s; + } return s; } else { return 
(java.lang.String) ref; @@ -9896,7 +10238,6 @@ public final class HBaseProtos { return this; } - // required string value = 2; private java.lang.Object value_ = ""; /** * required string value = 2; @@ -9910,9 +10251,12 @@ public final class HBaseProtos { public java.lang.String getValue() { java.lang.Object ref = value_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - value_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + value_ = s; + } return s; } else { return (java.lang.String) ref; @@ -9969,22 +10313,59 @@ public final class HBaseProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.NameStringPair) } + // @@protoc_insertion_point(class_scope:hbase.pb.NameStringPair) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair DEFAULT_INSTANCE; static { - defaultInstance = new NameStringPair(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public NameStringPair parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return new NameStringPair(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.NameStringPair) } - public interface NameBytesPairOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface NameBytesPairOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.NameBytesPair) + com.google.protobuf.MessageOrBuilder { - // required string name = 1; /** * required string name = 1; */ @@ -9999,7 +10380,6 @@ public final class HBaseProtos { com.google.protobuf.ByteString getNameBytes(); - // optional bytes value = 2; /** * optional bytes value = 2; */ @@ -10012,36 +10392,29 @@ public final class HBaseProtos { /** * Protobuf type {@code hbase.pb.NameBytesPair} */ - public static final class NameBytesPair extends - com.google.protobuf.GeneratedMessage - implements NameBytesPairOrBuilder { + public static final class NameBytesPair extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.NameBytesPair) + NameBytesPairOrBuilder { // Use NameBytesPair.newBuilder() to construct. 
- private NameBytesPair(com.google.protobuf.GeneratedMessage.Builder builder) { + private NameBytesPair(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private NameBytesPair(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final NameBytesPair defaultInstance; - public static NameBytesPair getDefaultInstance() { - return defaultInstance; - } - - public NameBytesPair getDefaultInstanceForType() { - return defaultInstance; + private NameBytesPair() { + name_ = ""; + value_ = com.google.protobuf.ByteString.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private NameBytesPair( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -10061,8 +10434,9 @@ public final class HBaseProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - name_ = input.readBytes(); + name_ = bs; break; } case 18: { @@ -10076,7 +10450,7 @@ public final class HBaseProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -10087,32 +10461,16 @@ public final class HBaseProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameBytesPair_descriptor; } - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameBytesPair_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public NameBytesPair parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new NameBytesPair(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string name = 1; public static final int NAME_FIELD_NUMBER = 1; - private java.lang.Object name_; + private volatile java.lang.Object name_; /** * required string name = 1; */ @@ -10153,7 +10511,6 @@ public final class HBaseProtos { } } - // optional bytes value = 2; public static final int VALUE_FIELD_NUMBER = 2; private com.google.protobuf.ByteString value_; /** @@ -10169,14 +10526,11 @@ public final class HBaseProtos { return value_; } - private void initFields() { - name_ = ""; - value_ = com.google.protobuf.ByteString.EMPTY; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasName()) { memoizedIsInitialized = 0; @@ -10188,43 +10542,34 @@ public final class HBaseProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws 
java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, value_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, value_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -10245,12 +10590,10 @@ public final class HBaseProtos { result = result && getValue() .equals(other.getValue()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -10266,7 +10609,7 @@ public final class HBaseProtos { hash = (37 * hash) + VALUE_FIELD_NUMBER; hash = (53 * hash) + getValue().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = 
hash; return hash; } @@ -10294,46 +10637,57 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return 
PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -10341,14 +10695,15 @@ public final class HBaseProtos { * Protobuf type {@code hbase.pb.NameBytesPair} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.NameBytesPair) + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameBytesPair_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameBytesPair_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -10361,18 +10716,15 @@ public final class HBaseProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); name_ = ""; @@ -10382,10 +10734,6 @@ public final class HBaseProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameBytesPair_descriptor; @@ -10420,6 +10768,32 @@ public final class HBaseProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } 
public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair)other); @@ -10439,13 +10813,13 @@ public final class HBaseProtos { if (other.hasValue()) { setValue(other.getValue()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasName()) { - return false; } return true; @@ -10460,7 +10834,7 @@ public final class HBaseProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -10470,7 +10844,6 @@ public final class HBaseProtos { } private int bitField0_; - // required string name = 1; private java.lang.Object name_ = ""; /** * required string name = 1; @@ -10484,9 +10857,12 @@ public final class HBaseProtos { public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - name_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + name_ = s; + } return s; } else { return (java.lang.String) ref; @@ -10544,7 +10920,6 @@ public final class HBaseProtos { return this; } - // optional bytes value = 2; private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes value = 2; @@ -10579,22 +10954,59 @@ public final class HBaseProtos { onChanged(); return this; } + public final 
Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.NameBytesPair) } + // @@protoc_insertion_point(class_scope:hbase.pb.NameBytesPair) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair DEFAULT_INSTANCE; static { - defaultInstance = new NameBytesPair(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public NameBytesPair parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new NameBytesPair(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.NameBytesPair) } - public interface BytesBytesPairOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface BytesBytesPairOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.BytesBytesPair) + com.google.protobuf.MessageOrBuilder { - // required bytes first = 1; /** * required bytes 
first = 1; */ @@ -10604,7 +11016,6 @@ public final class HBaseProtos { */ com.google.protobuf.ByteString getFirst(); - // required bytes second = 2; /** * required bytes second = 2; */ @@ -10617,36 +11028,29 @@ public final class HBaseProtos { /** * Protobuf type {@code hbase.pb.BytesBytesPair} */ - public static final class BytesBytesPair extends - com.google.protobuf.GeneratedMessage - implements BytesBytesPairOrBuilder { + public static final class BytesBytesPair extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.BytesBytesPair) + BytesBytesPairOrBuilder { // Use BytesBytesPair.newBuilder() to construct. - private BytesBytesPair(com.google.protobuf.GeneratedMessage.Builder builder) { + private BytesBytesPair(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private BytesBytesPair(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final BytesBytesPair defaultInstance; - public static BytesBytesPair getDefaultInstance() { - return defaultInstance; } - - public BytesBytesPair getDefaultInstanceForType() { - return defaultInstance; + private BytesBytesPair() { + first_ = com.google.protobuf.ByteString.EMPTY; + second_ = com.google.protobuf.ByteString.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private BytesBytesPair( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -10681,7 +11085,7 @@ public final class 
HBaseProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -10692,30 +11096,14 @@ public final class HBaseProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BytesBytesPair_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BytesBytesPair_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public BytesBytesPair parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new BytesBytesPair(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bytes first = 1; public static final int FIRST_FIELD_NUMBER = 1; private com.google.protobuf.ByteString first_; /** @@ -10731,7 +11119,6 @@ public final class HBaseProtos { return first_; } - // required bytes second = 2; public static final int SECOND_FIELD_NUMBER = 2; private com.google.protobuf.ByteString second_; /** @@ -10747,14 +11134,11 @@ public final class HBaseProtos { return second_; } - private void initFields() { - first_ = com.google.protobuf.ByteString.EMPTY; - 
second_ = com.google.protobuf.ByteString.EMPTY; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasFirst()) { memoizedIsInitialized = 0; @@ -10770,19 +11154,17 @@ public final class HBaseProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, first_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, second_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -10794,19 +11176,13 @@ public final class HBaseProtos { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, second_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -10827,12 +11203,10 @@ public final class HBaseProtos { result = result && getSecond() .equals(other.getSecond()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -10848,7 +11222,7 @@ public final class HBaseProtos { hash = 
(37 * hash) + SECOND_FIELD_NUMBER; hash = (53 * hash) + getSecond().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -10876,46 +11250,57 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -10923,14 +11308,15 @@ public final class HBaseProtos { * Protobuf type {@code hbase.pb.BytesBytesPair} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.BytesBytesPair) + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BytesBytesPair_descriptor; } - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BytesBytesPair_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -10943,18 +11329,15 @@ public final class HBaseProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); first_ = com.google.protobuf.ByteString.EMPTY; @@ -10964,10 +11347,6 @@ public final class HBaseProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BytesBytesPair_descriptor; @@ -11002,6 +11381,32 @@ public final class HBaseProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) 
super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair)other); @@ -11019,17 +11424,16 @@ public final class HBaseProtos { if (other.hasSecond()) { setSecond(other.getSecond()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasFirst()) { - return false; } if (!hasSecond()) { - return false; } return true; @@ -11044,7 +11448,7 @@ public final class HBaseProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -11054,7 +11458,6 @@ public final class HBaseProtos { } private int bitField0_; - // required bytes first = 1; private com.google.protobuf.ByteString first_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes first = 1; @@ -11090,7 +11493,6 @@ public final class HBaseProtos { return this; } - // required bytes second = 2; private com.google.protobuf.ByteString second_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes second = 2; @@ -11125,22 +11527,59 @@ public final class HBaseProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final 
Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.BytesBytesPair) } + // @@protoc_insertion_point(class_scope:hbase.pb.BytesBytesPair) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair DEFAULT_INSTANCE; static { - defaultInstance = new BytesBytesPair(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public BytesBytesPair parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new BytesBytesPair(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.BytesBytesPair) } - public interface NameInt64PairOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface NameInt64PairOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.NameInt64Pair) + com.google.protobuf.MessageOrBuilder { - // optional string name = 1; /** * optional string name = 1; */ @@ -11155,7 +11594,6 @@ public final class HBaseProtos { com.google.protobuf.ByteString getNameBytes(); - // optional int64 value = 2; 
/** * optional int64 value = 2; */ @@ -11168,36 +11606,29 @@ public final class HBaseProtos { /** * Protobuf type {@code hbase.pb.NameInt64Pair} */ - public static final class NameInt64Pair extends - com.google.protobuf.GeneratedMessage - implements NameInt64PairOrBuilder { + public static final class NameInt64Pair extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.NameInt64Pair) + NameInt64PairOrBuilder { // Use NameInt64Pair.newBuilder() to construct. - private NameInt64Pair(com.google.protobuf.GeneratedMessage.Builder builder) { + private NameInt64Pair(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private NameInt64Pair(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final NameInt64Pair defaultInstance; - public static NameInt64Pair getDefaultInstance() { - return defaultInstance; } - - public NameInt64Pair getDefaultInstanceForType() { - return defaultInstance; + private NameInt64Pair() { + name_ = ""; + value_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private NameInt64Pair( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -11217,8 +11648,9 @@ public final class HBaseProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - name_ = input.readBytes(); + name_ = bs; break; } case 16: { @@ -11232,7 +11664,7 @@ public final class HBaseProtos 
{ throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -11243,32 +11675,16 @@ public final class HBaseProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameInt64Pair_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameInt64Pair_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public NameInt64Pair parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new NameInt64Pair(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional string name = 1; public static final int NAME_FIELD_NUMBER = 1; - private java.lang.Object name_; + private volatile java.lang.Object name_; /** * optional string name = 1; */ @@ -11309,7 +11725,6 @@ public final class HBaseProtos { } } - // optional int64 value = 2; public static final int VALUE_FIELD_NUMBER = 2; private long value_; /** @@ -11325,14 +11740,11 @@ public final class HBaseProtos { return value_; } - private void initFields() { - name_ = ""; - value_ = 0L; - } private byte 
memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -11340,43 +11752,34 @@ public final class HBaseProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeInt64(2, value_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeInt64Size(2, value_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -11397,12 +11800,10 @@ public final class HBaseProtos { result = result && (getValue() == other.getValue()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && 
unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -11416,9 +11817,10 @@ public final class HBaseProtos { } if (hasValue()) { hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getValue()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getValue()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -11446,46 +11848,57 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public 
static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -11493,14 +11906,15 @@ public final class HBaseProtos { * Protobuf type {@code hbase.pb.NameInt64Pair} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.NameInt64Pair) + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameInt64Pair_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameInt64Pair_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -11513,18 +11927,15 @@ public final class HBaseProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); name_ = ""; @@ -11534,10 +11945,6 
@@ public final class HBaseProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameInt64Pair_descriptor; @@ -11572,6 +11979,32 @@ public final class HBaseProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair)other); @@ -11591,7 +12024,8 @@ public final class HBaseProtos { if (other.hasValue()) { setValue(other.getValue()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -11608,7 +12042,7 @@ public final class HBaseProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = 
(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -11618,7 +12052,6 @@ public final class HBaseProtos { } private int bitField0_; - // optional string name = 1; private java.lang.Object name_ = ""; /** * optional string name = 1; @@ -11632,9 +12065,12 @@ public final class HBaseProtos { public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - name_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + name_ = s; + } return s; } else { return (java.lang.String) ref; @@ -11692,7 +12128,6 @@ public final class HBaseProtos { return this; } - // optional int64 value = 2; private long value_ ; /** * optional int64 value = 2; @@ -11724,22 +12159,59 @@ public final class HBaseProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.NameInt64Pair) } + // @@protoc_insertion_point(class_scope:hbase.pb.NameInt64Pair) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair DEFAULT_INSTANCE; static { - defaultInstance = new NameInt64Pair(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair getDefaultInstance() { + return 
DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public NameInt64Pair parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new NameInt64Pair(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.NameInt64Pair) } - public interface SnapshotDescriptionOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface SnapshotDescriptionOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SnapshotDescription) + com.google.protobuf.MessageOrBuilder { - // required string name = 1; /** * required string name = 1; */ @@ -11754,34 +12226,32 @@ public final class HBaseProtos { com.google.protobuf.ByteString getNameBytes(); - // optional string table = 2; /** - * optional string table = 2; - * *
      * not needed for delete, but checked for in taking snapshot
      * 
+ * + * optional string table = 2; */ boolean hasTable(); /** - * optional string table = 2; - * *
      * not needed for delete, but checked for in taking snapshot
      * 
+ * + * optional string table = 2; */ java.lang.String getTable(); /** - * optional string table = 2; - * *
      * not needed for delete, but checked for in taking snapshot
      * 
+ * + * optional string table = 2; */ com.google.protobuf.ByteString getTableBytes(); - // optional int64 creation_time = 3 [default = 0]; /** * optional int64 creation_time = 3 [default = 0]; */ @@ -11791,7 +12261,6 @@ public final class HBaseProtos { */ long getCreationTime(); - // optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; /** * optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; */ @@ -11801,7 +12270,6 @@ public final class HBaseProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Type getType(); - // optional int32 version = 5; /** * optional int32 version = 5; */ @@ -11811,7 +12279,6 @@ public final class HBaseProtos { */ int getVersion(); - // optional string owner = 6; /** * optional string owner = 6; */ @@ -11827,43 +12294,40 @@ public final class HBaseProtos { getOwnerBytes(); } /** - * Protobuf type {@code hbase.pb.SnapshotDescription} - * *
    **
    * Description of the snapshot to take
    * 
+ * + * Protobuf type {@code hbase.pb.SnapshotDescription} */ - public static final class SnapshotDescription extends - com.google.protobuf.GeneratedMessage - implements SnapshotDescriptionOrBuilder { + public static final class SnapshotDescription extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.SnapshotDescription) + SnapshotDescriptionOrBuilder { // Use SnapshotDescription.newBuilder() to construct. - private SnapshotDescription(com.google.protobuf.GeneratedMessage.Builder builder) { + private SnapshotDescription(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private SnapshotDescription(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SnapshotDescription defaultInstance; - public static SnapshotDescription getDefaultInstance() { - return defaultInstance; } - - public SnapshotDescription getDefaultInstanceForType() { - return defaultInstance; + private SnapshotDescription() { + name_ = ""; + table_ = ""; + creationTime_ = 0L; + type_ = 1; + version_ = 0; + owner_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private SnapshotDescription( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -11883,13 +12347,15 @@ public final class HBaseProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - name_ = input.readBytes(); + name_ = bs; break; } 
case 18: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; - table_ = input.readBytes(); + table_ = bs; break; } case 24: { @@ -11904,7 +12370,7 @@ public final class HBaseProtos { unknownFields.mergeVarintField(4, rawValue); } else { bitField0_ |= 0x00000008; - type_ = value; + type_ = rawValue; } break; } @@ -11914,8 +12380,9 @@ public final class HBaseProtos { break; } case 50: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000020; - owner_ = input.readBytes(); + owner_ = bs; break; } } @@ -11924,7 +12391,7 @@ public final class HBaseProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -11935,28 +12402,13 @@ public final class HBaseProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_SnapshotDescription_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SnapshotDescription parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SnapshotDescription(input, 
extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - /** * Protobuf enum {@code hbase.pb.SnapshotDescription.Type} */ @@ -11965,15 +12417,15 @@ public final class HBaseProtos { /** * DISABLED = 0; */ - DISABLED(0, 0), + DISABLED(0), /** * FLUSH = 1; */ - FLUSH(1, 1), + FLUSH(1), /** * SKIPFLUSH = 2; */ - SKIPFLUSH(2, 2), + SKIPFLUSH(2), ; /** @@ -11990,9 +12442,19 @@ public final class HBaseProtos { public static final int SKIPFLUSH_VALUE = 2; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static Type valueOf(int value) { + return forNumber(value); + } + + public static Type forNumber(int value) { switch (value) { case 0: return DISABLED; case 1: return FLUSH; @@ -12005,17 +12467,17 @@ public final class HBaseProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + Type> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public Type findValueByNumber(int number) { - return Type.valueOf(number); + return Type.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -12037,11 +12499,9 @@ public final class HBaseProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private Type(int index, int value) { - this.index = index; + private Type(int value) { this.value = value; } @@ -12049,9 +12509,8 @@ public final class HBaseProtos { } private int bitField0_; - // required string name = 1; public 
static final int NAME_FIELD_NUMBER = 1; - private java.lang.Object name_; + private volatile java.lang.Object name_; /** * required string name = 1; */ @@ -12092,25 +12551,24 @@ public final class HBaseProtos { } } - // optional string table = 2; public static final int TABLE_FIELD_NUMBER = 2; - private java.lang.Object table_; + private volatile java.lang.Object table_; /** - * optional string table = 2; - * *
      * not needed for delete, but checked for in taking snapshot
      * 
+ * + * optional string table = 2; */ public boolean hasTable() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional string table = 2; - * *
      * not needed for delete, but checked for in taking snapshot
      * 
+ * + * optional string table = 2; */ public java.lang.String getTable() { java.lang.Object ref = table_; @@ -12127,11 +12585,11 @@ public final class HBaseProtos { } } /** - * optional string table = 2; - * *
      * not needed for delete, but checked for in taking snapshot
      * 
+ * + * optional string table = 2; */ public com.google.protobuf.ByteString getTableBytes() { @@ -12147,7 +12605,6 @@ public final class HBaseProtos { } } - // optional int64 creation_time = 3 [default = 0]; public static final int CREATION_TIME_FIELD_NUMBER = 3; private long creationTime_; /** @@ -12163,9 +12620,8 @@ public final class HBaseProtos { return creationTime_; } - // optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; public static final int TYPE_FIELD_NUMBER = 4; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Type type_; + private int type_; /** * optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; */ @@ -12176,10 +12632,10 @@ public final class HBaseProtos { * optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Type getType() { - return type_; + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Type result = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Type.valueOf(type_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH : result; } - // optional int32 version = 5; public static final int VERSION_FIELD_NUMBER = 5; private int version_; /** @@ -12195,9 +12651,8 @@ public final class HBaseProtos { return version_; } - // optional string owner = 6; public static final int OWNER_FIELD_NUMBER = 6; - private java.lang.Object owner_; + private volatile java.lang.Object owner_; /** * optional string owner = 6; */ @@ -12238,18 +12693,11 @@ public final class HBaseProtos { } } - private void initFields() { - name_ = ""; - table_ = ""; - creationTime_ = 0L; - type_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH; - version_ = 0; - owner_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasName()) { memoizedIsInitialized = 0; @@ -12261,41 +12709,37 @@ public final class HBaseProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getTableBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, table_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeInt64(3, creationTime_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeEnum(4, type_.getNumber()); + output.writeEnum(4, type_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeInt32(5, version_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { - output.writeBytes(6, getOwnerBytes()); + 
com.google.protobuf.GeneratedMessageV3.writeString(output, 6, owner_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getTableBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, table_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream @@ -12303,29 +12747,22 @@ public final class HBaseProtos { } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(4, type_.getNumber()); + .computeEnumSize(4, type_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(5, version_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(6, getOwnerBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, owner_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -12353,8 +12790,7 @@ public final class HBaseProtos { } result = result && (hasType() == 
other.hasType()); if (hasType()) { - result = result && - (getType() == other.getType()); + result = result && type_ == other.type_; } result = result && (hasVersion() == other.hasVersion()); if (hasVersion()) { @@ -12366,12 +12802,10 @@ public final class HBaseProtos { result = result && getOwner() .equals(other.getOwner()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -12389,11 +12823,12 @@ public final class HBaseProtos { } if (hasCreationTime()) { hash = (37 * hash) + CREATION_TIME_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getCreationTime()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getCreationTime()); } if (hasType()) { hash = (37 * hash) + TYPE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getType()); + hash = (53 * hash) + type_; } if (hasVersion()) { hash = (37 * hash) + VERSION_FIELD_NUMBER; @@ -12403,7 +12838,7 @@ public final class HBaseProtos { hash = (37 * hash) + OWNER_FIELD_NUMBER; hash = (53 * hash) + getOwner().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -12431,66 +12866,78 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder 
toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.SnapshotDescription} - * *
      **
      * Description of the snapshot to take
      * 
+ * + * Protobuf type {@code hbase.pb.SnapshotDescription} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SnapshotDescription) + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_SnapshotDescription_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -12503,18 +12950,15 @@ public final class HBaseProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); name_ = ""; @@ -12523,7 +12967,7 @@ public final class HBaseProtos { bitField0_ = (bitField0_ & ~0x00000002); creationTime_ = 0L; bitField0_ = (bitField0_ & ~0x00000004); - type_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH; + type_ = 1; bitField0_ = (bitField0_ & ~0x00000008); version_ = 0; bitField0_ = (bitField0_ & ~0x00000010); @@ -12532,10 
+12976,6 @@ public final class HBaseProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_SnapshotDescription_descriptor; @@ -12586,6 +13026,32 @@ public final class HBaseProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription)other); @@ -12621,13 +13087,13 @@ public final class HBaseProtos { owner_ = other.owner_; onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasName()) { - return false; } return true; @@ -12642,7 +13108,7 @@ public final class HBaseProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -12652,7 +13118,6 @@ public final class HBaseProtos { } private int bitField0_; - // required string name = 1; private java.lang.Object name_ = ""; /** * required string name = 1; @@ -12666,9 +13131,12 @@ public final class HBaseProtos { public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - name_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + name_ = s; + } return s; } else { return (java.lang.String) ref; @@ -12726,42 +13194,44 @@ public final class HBaseProtos { return this; } - // optional string table = 2; private java.lang.Object table_ = ""; /** - * optional string table = 2; - * *
        * not needed for delete, but checked for in taking snapshot
        * 
+ * + * optional string table = 2; */ public boolean hasTable() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional string table = 2; - * *
        * not needed for delete, but checked for in taking snapshot
        * 
+ * + * optional string table = 2; */ public java.lang.String getTable() { java.lang.Object ref = table_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - table_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + table_ = s; + } return s; } else { return (java.lang.String) ref; } } /** - * optional string table = 2; - * *
        * not needed for delete, but checked for in taking snapshot
        * 
+ * + * optional string table = 2; */ public com.google.protobuf.ByteString getTableBytes() { @@ -12777,11 +13247,11 @@ public final class HBaseProtos { } } /** - * optional string table = 2; - * *
        * not needed for delete, but checked for in taking snapshot
        * 
+ * + * optional string table = 2; */ public Builder setTable( java.lang.String value) { @@ -12794,11 +13264,11 @@ public final class HBaseProtos { return this; } /** - * optional string table = 2; - * *
        * not needed for delete, but checked for in taking snapshot
        * 
+ * + * optional string table = 2; */ public Builder clearTable() { bitField0_ = (bitField0_ & ~0x00000002); @@ -12807,11 +13277,11 @@ public final class HBaseProtos { return this; } /** - * optional string table = 2; - * *
        * not needed for delete, but checked for in taking snapshot
        * 
+ * + * optional string table = 2; */ public Builder setTableBytes( com.google.protobuf.ByteString value) { @@ -12824,7 +13294,6 @@ public final class HBaseProtos { return this; } - // optional int64 creation_time = 3 [default = 0]; private long creationTime_ ; /** * optional int64 creation_time = 3 [default = 0]; @@ -12857,8 +13326,7 @@ public final class HBaseProtos { return this; } - // optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Type type_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH; + private int type_ = 1; /** * optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; */ @@ -12869,7 +13337,8 @@ public final class HBaseProtos { * optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Type getType() { - return type_; + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Type result = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Type.valueOf(type_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH : result; } /** * optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; @@ -12879,7 +13348,7 @@ public final class HBaseProtos { throw new NullPointerException(); } bitField0_ |= 0x00000008; - type_ = value; + type_ = value.getNumber(); onChanged(); return this; } @@ -12888,12 +13357,11 @@ public final class HBaseProtos { */ public Builder clearType() { bitField0_ = (bitField0_ & ~0x00000008); - type_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH; + type_ = 1; onChanged(); return this; } - // optional int32 version = 5; private int version_ ; /** * optional int32 version = 5; @@ -12926,7 +13394,6 @@ public final class HBaseProtos { return this; } - // optional string owner = 6; private java.lang.Object owner_ = ""; /** * optional string owner = 6; @@ -12940,9 +13407,12 @@ public final class HBaseProtos { public java.lang.String getOwner() { java.lang.Object ref = owner_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - owner_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + owner_ = s; + } return s; } else { return (java.lang.String) ref; @@ -12999,76 +13469,111 @@ public final class HBaseProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotDescription) } + // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotDescription) + private static final 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription DEFAULT_INSTANCE; static { - defaultInstance = new SnapshotDescription(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription(); } - // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotDescription) - } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getDefaultInstance() { + return DEFAULT_INSTANCE; + } - public interface ProcedureDescriptionOrBuilder - extends com.google.protobuf.MessageOrBuilder { + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public SnapshotDescription parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SnapshotDescription(input, extensionRegistry); + } + }; - // required string signature = 1; - /** - * required string signature = 1; - * - *
-     * the unique signature of the procedure
-     * 
- */ - boolean hasSignature(); - /** - * required string signature = 1; + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface ProcedureDescriptionOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ProcedureDescription) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * the unique signature of the procedure
+     * 
* + * required string signature = 1; + */ + boolean hasSignature(); + /** *
      * the unique signature of the procedure
      * 
+ * + * required string signature = 1; */ java.lang.String getSignature(); /** - * required string signature = 1; - * *
      * the unique signature of the procedure
      * 
+ * + * required string signature = 1; */ com.google.protobuf.ByteString getSignatureBytes(); - // optional string instance = 2; /** - * optional string instance = 2; - * *
      * the procedure instance name
      * 
+ * + * optional string instance = 2; */ boolean hasInstance(); /** - * optional string instance = 2; - * *
      * the procedure instance name
      * 
+ * + * optional string instance = 2; */ java.lang.String getInstance(); /** - * optional string instance = 2; - * *
      * the procedure instance name
      * 
+ * + * optional string instance = 2; */ com.google.protobuf.ByteString getInstanceBytes(); - // optional int64 creation_time = 3 [default = 0]; /** * optional int64 creation_time = 3 [default = 0]; */ @@ -13078,7 +13583,6 @@ public final class HBaseProtos { */ long getCreationTime(); - // repeated .hbase.pb.NameStringPair configuration = 4; /** * repeated .hbase.pb.NameStringPair configuration = 4; */ @@ -13104,43 +13608,38 @@ public final class HBaseProtos { int index); } /** - * Protobuf type {@code hbase.pb.ProcedureDescription} - * *
    **
    * Description of the distributed procedure to take
    * 
+ * + * Protobuf type {@code hbase.pb.ProcedureDescription} */ - public static final class ProcedureDescription extends - com.google.protobuf.GeneratedMessage - implements ProcedureDescriptionOrBuilder { + public static final class ProcedureDescription extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ProcedureDescription) + ProcedureDescriptionOrBuilder { // Use ProcedureDescription.newBuilder() to construct. - private ProcedureDescription(com.google.protobuf.GeneratedMessage.Builder builder) { + private ProcedureDescription(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private ProcedureDescription(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ProcedureDescription defaultInstance; - public static ProcedureDescription getDefaultInstance() { - return defaultInstance; - } - - public ProcedureDescription getDefaultInstanceForType() { - return defaultInstance; + private ProcedureDescription() { + signature_ = ""; + instance_ = ""; + creationTime_ = 0L; + configuration_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ProcedureDescription( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -13160,13 +13659,15 @@ public final class HBaseProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - signature_ = 
input.readBytes(); + signature_ = bs; break; } case 18: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; - instance_ = input.readBytes(); + instance_ = bs; break; } case 24: { @@ -13179,7 +13680,8 @@ public final class HBaseProtos { configuration_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000008; } - configuration_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry)); + configuration_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry)); break; } } @@ -13188,7 +13690,7 @@ public final class HBaseProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { configuration_ = java.util.Collections.unmodifiableList(configuration_); @@ -13202,48 +13704,32 @@ public final class HBaseProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ProcedureDescription_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ProcedureDescription_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ProcedureDescription parsePartialFrom( - com.google.protobuf.CodedInputStream input, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ProcedureDescription(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string signature = 1; public static final int SIGNATURE_FIELD_NUMBER = 1; - private java.lang.Object signature_; + private volatile java.lang.Object signature_; /** - * required string signature = 1; - * *
      * the unique signature of the procedure
      * 
+ * + * required string signature = 1; */ public boolean hasSignature() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required string signature = 1; - * *
      * the unique signature of the procedure
      * 
+ * + * required string signature = 1; */ public java.lang.String getSignature() { java.lang.Object ref = signature_; @@ -13260,11 +13746,11 @@ public final class HBaseProtos { } } /** - * required string signature = 1; - * *
      * the unique signature of the procedure
      * 
+ * + * required string signature = 1; */ public com.google.protobuf.ByteString getSignatureBytes() { @@ -13280,25 +13766,24 @@ public final class HBaseProtos { } } - // optional string instance = 2; public static final int INSTANCE_FIELD_NUMBER = 2; - private java.lang.Object instance_; + private volatile java.lang.Object instance_; /** - * optional string instance = 2; - * *
      * the procedure instance name
      * 
+ * + * optional string instance = 2; */ public boolean hasInstance() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional string instance = 2; - * *
      * the procedure instance name
      * 
+ * + * optional string instance = 2; */ public java.lang.String getInstance() { java.lang.Object ref = instance_; @@ -13315,11 +13800,11 @@ public final class HBaseProtos { } } /** - * optional string instance = 2; - * *
      * the procedure instance name
      * 
+ * + * optional string instance = 2; */ public com.google.protobuf.ByteString getInstanceBytes() { @@ -13335,7 +13820,6 @@ public final class HBaseProtos { } } - // optional int64 creation_time = 3 [default = 0]; public static final int CREATION_TIME_FIELD_NUMBER = 3; private long creationTime_; /** @@ -13351,7 +13835,6 @@ public final class HBaseProtos { return creationTime_; } - // repeated .hbase.pb.NameStringPair configuration = 4; public static final int CONFIGURATION_FIELD_NUMBER = 4; private java.util.List configuration_; /** @@ -13387,16 +13870,11 @@ public final class HBaseProtos { return configuration_.get(index); } - private void initFields() { - signature_ = ""; - instance_ = ""; - creationTime_ = 0L; - configuration_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasSignature()) { memoizedIsInitialized = 0; @@ -13414,12 +13892,11 @@ public final class HBaseProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getSignatureBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, signature_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getInstanceBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, instance_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeInt64(3, creationTime_); @@ -13427,22 +13904,19 @@ public final class HBaseProtos { for (int i = 0; i < configuration_.size(); i++) { output.writeMessage(4, configuration_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int 
getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getSignatureBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, signature_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getInstanceBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, instance_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream @@ -13452,19 +13926,13 @@ public final class HBaseProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(4, configuration_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -13492,12 +13960,10 @@ public final class HBaseProtos { } result = result && getConfigurationList() .equals(other.getConfigurationList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -13515,13 +13981,14 @@ public final class HBaseProtos { } if (hasCreationTime()) { hash = (37 * hash) + CREATION_TIME_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getCreationTime()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getCreationTime()); } if (getConfigurationCount() > 
0) { hash = (37 * hash) + CONFIGURATION_FIELD_NUMBER; hash = (53 * hash) + getConfigurationList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -13549,66 +14016,78 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.ProcedureDescription} - * *
      **
      * Description of the distributed procedure to take
      * 
+ * + * Protobuf type {@code hbase.pb.ProcedureDescription} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ProcedureDescription) + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ProcedureDescription_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ProcedureDescription_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -13621,19 +14100,16 @@ public final class HBaseProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getConfigurationFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); signature_ = ""; @@ -13651,10 +14127,6 @@ public final class HBaseProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ProcedureDescription_descriptor; @@ -13702,6 +14174,32 @@ public final class HBaseProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription)other); @@ -13745,25 +14243,24 @@ public final class HBaseProtos { configuration_ = other.configuration_; bitField0_ = (bitField0_ & ~0x00000008); configurationBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getConfigurationFieldBuilder() : null; } else { configurationBuilder_.addAllMessages(other.configuration_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasSignature()) { - return false; } for (int i = 0; i < getConfigurationCount(); i++) { if (!getConfiguration(i).isInitialized()) { - return false; } } @@ -13779,7 +14276,7 @@ public final class HBaseProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -13789,42 +14286,44 @@ public final class HBaseProtos { } private int bitField0_; - // required string signature = 1; private java.lang.Object signature_ = ""; /** - * required string signature = 1; - * *
        * the unique signature of the procedure
        * 
+ * + * required string signature = 1; */ public boolean hasSignature() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required string signature = 1; - * *
        * the unique signature of the procedure
        * 
+ * + * required string signature = 1; */ public java.lang.String getSignature() { java.lang.Object ref = signature_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - signature_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + signature_ = s; + } return s; } else { return (java.lang.String) ref; } } /** - * required string signature = 1; - * *
        * the unique signature of the procedure
        * 
+ * + * required string signature = 1; */ public com.google.protobuf.ByteString getSignatureBytes() { @@ -13840,11 +14339,11 @@ public final class HBaseProtos { } } /** - * required string signature = 1; - * *
        * the unique signature of the procedure
        * 
+ * + * required string signature = 1; */ public Builder setSignature( java.lang.String value) { @@ -13857,11 +14356,11 @@ public final class HBaseProtos { return this; } /** - * required string signature = 1; - * *
        * the unique signature of the procedure
        * 
+ * + * required string signature = 1; */ public Builder clearSignature() { bitField0_ = (bitField0_ & ~0x00000001); @@ -13870,11 +14369,11 @@ public final class HBaseProtos { return this; } /** - * required string signature = 1; - * *
        * the unique signature of the procedure
        * 
+ * + * required string signature = 1; */ public Builder setSignatureBytes( com.google.protobuf.ByteString value) { @@ -13887,42 +14386,44 @@ public final class HBaseProtos { return this; } - // optional string instance = 2; private java.lang.Object instance_ = ""; /** - * optional string instance = 2; - * *
        * the procedure instance name
        * 
+ * + * optional string instance = 2; */ public boolean hasInstance() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional string instance = 2; - * *
        * the procedure instance name
        * 
+ * + * optional string instance = 2; */ public java.lang.String getInstance() { java.lang.Object ref = instance_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - instance_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + instance_ = s; + } return s; } else { return (java.lang.String) ref; } } /** - * optional string instance = 2; - * *
        * the procedure instance name
        * 
+ * + * optional string instance = 2; */ public com.google.protobuf.ByteString getInstanceBytes() { @@ -13938,11 +14439,11 @@ public final class HBaseProtos { } } /** - * optional string instance = 2; - * *
        * the procedure instance name
        * 
+ * + * optional string instance = 2; */ public Builder setInstance( java.lang.String value) { @@ -13955,11 +14456,11 @@ public final class HBaseProtos { return this; } /** - * optional string instance = 2; - * *
        * the procedure instance name
        * 
+ * + * optional string instance = 2; */ public Builder clearInstance() { bitField0_ = (bitField0_ & ~0x00000002); @@ -13968,11 +14469,11 @@ public final class HBaseProtos { return this; } /** - * optional string instance = 2; - * *
        * the procedure instance name
        * 
+ * + * optional string instance = 2; */ public Builder setInstanceBytes( com.google.protobuf.ByteString value) { @@ -13985,7 +14486,6 @@ public final class HBaseProtos { return this; } - // optional int64 creation_time = 3 [default = 0]; private long creationTime_ ; /** * optional int64 creation_time = 3 [default = 0]; @@ -14018,7 +14518,6 @@ public final class HBaseProtos { return this; } - // repeated .hbase.pb.NameStringPair configuration = 4; private java.util.List configuration_ = java.util.Collections.emptyList(); private void ensureConfigurationIsMutable() { @@ -14028,7 +14527,7 @@ public final class HBaseProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> configurationBuilder_; /** @@ -14160,7 +14659,8 @@ public final class HBaseProtos { java.lang.Iterable values) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); - super.addAll(values, configuration_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, configuration_); onChanged(); } else { configurationBuilder_.addAllMessages(values); @@ -14243,11 +14743,11 @@ public final class HBaseProtos { getConfigurationBuilderList() { return getConfigurationFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationFieldBuilder() { if (configurationBuilder_ == null) { - configurationBuilder_ = new 
com.google.protobuf.RepeatedFieldBuilder< + configurationBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>( configuration_, ((bitField0_ & 0x00000008) == 0x00000008), @@ -14257,54 +14757,83 @@ public final class HBaseProtos { } return configurationBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ProcedureDescription) } + // @@protoc_insertion_point(class_scope:hbase.pb.ProcedureDescription) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription DEFAULT_INSTANCE; static { - defaultInstance = new ProcedureDescription(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ProcedureDescription parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ProcedureDescription(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public 
com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ProcedureDescription) } - public interface EmptyMsgOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface EmptyMsgOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.EmptyMsg) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.EmptyMsg} */ - public static final class EmptyMsg extends - com.google.protobuf.GeneratedMessage - implements EmptyMsgOrBuilder { + public static final class EmptyMsg extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.EmptyMsg) + EmptyMsgOrBuilder { // Use EmptyMsg.newBuilder() to construct. - private EmptyMsg(com.google.protobuf.GeneratedMessage.Builder builder) { + private EmptyMsg(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private EmptyMsg(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final EmptyMsg defaultInstance; - public static EmptyMsg getDefaultInstance() { - return defaultInstance; + private EmptyMsg() { } - public EmptyMsg getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private EmptyMsg( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = 
com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -14328,7 +14857,7 @@ public final class HBaseProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -14339,34 +14868,18 @@ public final class HBaseProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_EmptyMsg_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_EmptyMsg_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public EmptyMsg parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new EmptyMsg(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -14374,29 +14887,21 @@ public final class HBaseProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) 
throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -14407,12 +14912,10 @@ public final class HBaseProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -14420,7 +14923,7 @@ public final class HBaseProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -14448,46 +14951,57 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( java.io.InputStream 
input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg prototype) { - return 
newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -14495,14 +15009,15 @@ public final class HBaseProtos { * Protobuf type {@code hbase.pb.EmptyMsg} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsgOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.EmptyMsg) + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsgOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_EmptyMsg_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_EmptyMsg_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -14515,27 +15030,20 @@ public final class HBaseProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - 
private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_EmptyMsg_descriptor; @@ -14559,6 +15067,32 @@ public final class HBaseProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg)other); @@ -14570,7 +15104,8 @@ public final class HBaseProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -14587,7 
+15122,7 @@ public final class HBaseProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -14595,22 +15130,59 @@ public final class HBaseProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.EmptyMsg) } + // @@protoc_insertion_point(class_scope:hbase.pb.EmptyMsg) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg DEFAULT_INSTANCE; static { - defaultInstance = new EmptyMsg(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public EmptyMsg parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new EmptyMsg(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.EmptyMsg) } - public interface LongMsgOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface LongMsgOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.LongMsg) + com.google.protobuf.MessageOrBuilder { - // required int64 long_msg = 1; /** * required int64 long_msg = 1; */ @@ -14623,36 +15195,28 @@ public final class HBaseProtos { /** * Protobuf type {@code hbase.pb.LongMsg} */ - public static final class LongMsg extends - com.google.protobuf.GeneratedMessage - implements LongMsgOrBuilder { + public static final class LongMsg extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.LongMsg) + LongMsgOrBuilder { // Use LongMsg.newBuilder() to construct. - private LongMsg(com.google.protobuf.GeneratedMessage.Builder builder) { + private LongMsg(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private LongMsg(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final LongMsg defaultInstance; - public static LongMsg getDefaultInstance() { - return defaultInstance; } - - public LongMsg getDefaultInstanceForType() { - return defaultInstance; + private LongMsg() { + longMsg_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private LongMsg( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; 
com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -14682,7 +15246,7 @@ public final class HBaseProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -14693,30 +15257,14 @@ public final class HBaseProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_LongMsg_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_LongMsg_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public LongMsg parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new LongMsg(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required int64 long_msg = 1; public static final int LONG_MSG_FIELD_NUMBER = 1; private long longMsg_; /** @@ -14732,13 +15280,11 @@ public final class HBaseProtos { return longMsg_; } - private void initFields() { - longMsg_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if 
(isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasLongMsg()) { memoizedIsInitialized = 0; @@ -14750,16 +15296,14 @@ public final class HBaseProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeInt64(1, longMsg_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -14767,19 +15311,13 @@ public final class HBaseProtos { size += com.google.protobuf.CodedOutputStream .computeInt64Size(1, longMsg_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -14795,12 +15333,10 @@ public final class HBaseProtos { result = result && (getLongMsg() == other.getLongMsg()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -14810,9 +15346,10 @@ public final class HBaseProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasLongMsg()) { hash = (37 * hash) + LONG_MSG_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLongMsg()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getLongMsg()); } - hash = (29 * 
hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -14840,46 +15377,57 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -14887,14 +15435,15 @@ public final class HBaseProtos { * Protobuf type {@code hbase.pb.LongMsg} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsgOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.LongMsg) + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsgOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_LongMsg_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_LongMsg_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -14907,18 +15456,15 @@ public final class HBaseProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); longMsg_ = 0L; @@ -14926,10 +15472,6 @@ public final class HBaseProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_LongMsg_descriptor; @@ -14960,6 +15502,32 @@ public final class HBaseProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public 
Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg)other); @@ -14974,13 +15542,13 @@ public final class HBaseProtos { if (other.hasLongMsg()) { setLongMsg(other.getLongMsg()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasLongMsg()) { - return false; } return true; @@ -14995,7 +15563,7 @@ public final class HBaseProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -15005,7 +15573,6 @@ public final class HBaseProtos { } private int bitField0_; - // required int64 long_msg = 1; private long longMsg_ ; /** * required int64 long_msg = 1; @@ -15037,22 +15604,59 @@ public final class HBaseProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.LongMsg) } + // @@protoc_insertion_point(class_scope:hbase.pb.LongMsg) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg DEFAULT_INSTANCE; static { - defaultInstance = new LongMsg(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg(); + } + + 
public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public LongMsg parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new LongMsg(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.LongMsg) } - public interface DoubleMsgOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface DoubleMsgOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.DoubleMsg) + com.google.protobuf.MessageOrBuilder { - // required double double_msg = 1; /** * required double double_msg = 1; */ @@ -15065,36 +15669,28 @@ public final class HBaseProtos { /** * Protobuf type {@code hbase.pb.DoubleMsg} */ - public static final class DoubleMsg extends - com.google.protobuf.GeneratedMessage - implements DoubleMsgOrBuilder { + public static final class DoubleMsg extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.DoubleMsg) + DoubleMsgOrBuilder { // Use DoubleMsg.newBuilder() to construct. 
- private DoubleMsg(com.google.protobuf.GeneratedMessage.Builder builder) { + private DoubleMsg(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private DoubleMsg(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final DoubleMsg defaultInstance; - public static DoubleMsg getDefaultInstance() { - return defaultInstance; - } - - public DoubleMsg getDefaultInstanceForType() { - return defaultInstance; + private DoubleMsg() { + doubleMsg_ = 0D; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private DoubleMsg( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -15124,7 +15720,7 @@ public final class HBaseProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -15135,30 +15731,14 @@ public final class HBaseProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_DoubleMsg_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_DoubleMsg_fieldAccessorTable 
.ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public DoubleMsg parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new DoubleMsg(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required double double_msg = 1; public static final int DOUBLE_MSG_FIELD_NUMBER = 1; private double doubleMsg_; /** @@ -15174,13 +15754,11 @@ public final class HBaseProtos { return doubleMsg_; } - private void initFields() { - doubleMsg_ = 0D; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasDoubleMsg()) { memoizedIsInitialized = 0; @@ -15192,16 +15770,14 @@ public final class HBaseProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeDouble(1, doubleMsg_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -15209,19 +15785,13 @@ public final class HBaseProtos { size += com.google.protobuf.CodedOutputStream .computeDoubleSize(1, doubleMsg_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + 
size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -15234,14 +15804,15 @@ public final class HBaseProtos { boolean result = true; result = result && (hasDoubleMsg() == other.hasDoubleMsg()); if (hasDoubleMsg()) { - result = result && (Double.doubleToLongBits(getDoubleMsg()) == Double.doubleToLongBits(other.getDoubleMsg())); + result = result && ( + java.lang.Double.doubleToLongBits(getDoubleMsg()) + == java.lang.Double.doubleToLongBits( + other.getDoubleMsg())); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -15251,10 +15822,10 @@ public final class HBaseProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasDoubleMsg()) { hash = (37 * hash) + DOUBLE_MSG_FIELD_NUMBER; - hash = (53 * hash) + hashLong( - Double.doubleToLongBits(getDoubleMsg())); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + java.lang.Double.doubleToLongBits(getDoubleMsg())); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -15282,46 +15853,57 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder 
newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -15329,14 +15911,15 @@ public final class HBaseProtos { * Protobuf type {@code hbase.pb.DoubleMsg} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsgOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.DoubleMsg) + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsgOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_DoubleMsg_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_DoubleMsg_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -15349,18 +15932,15 @@ public final class HBaseProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if 
(com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); doubleMsg_ = 0D; @@ -15368,10 +15948,6 @@ public final class HBaseProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_DoubleMsg_descriptor; @@ -15402,6 +15978,32 @@ public final class HBaseProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg)other); @@ -15416,13 +16018,13 @@ public final class HBaseProtos { if (other.hasDoubleMsg()) { setDoubleMsg(other.getDoubleMsg()); } - this.mergeUnknownFields(other.getUnknownFields()); + 
this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasDoubleMsg()) { - return false; } return true; @@ -15437,7 +16039,7 @@ public final class HBaseProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -15447,7 +16049,6 @@ public final class HBaseProtos { } private int bitField0_; - // required double double_msg = 1; private double doubleMsg_ ; /** * required double double_msg = 1; @@ -15479,22 +16080,59 @@ public final class HBaseProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.DoubleMsg) } + // @@protoc_insertion_point(class_scope:hbase.pb.DoubleMsg) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg DEFAULT_INSTANCE; static { - defaultInstance = new DoubleMsg(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public DoubleMsg parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DoubleMsg(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.DoubleMsg) } - public interface BigDecimalMsgOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface BigDecimalMsgOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.BigDecimalMsg) + com.google.protobuf.MessageOrBuilder { - // required bytes bigdecimal_msg = 1; /** * required bytes bigdecimal_msg = 1; */ @@ -15507,36 +16145,28 @@ public final class HBaseProtos { /** * Protobuf type {@code hbase.pb.BigDecimalMsg} */ - public static final class BigDecimalMsg extends - com.google.protobuf.GeneratedMessage - implements BigDecimalMsgOrBuilder { + public static final class BigDecimalMsg extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.BigDecimalMsg) + BigDecimalMsgOrBuilder { // Use BigDecimalMsg.newBuilder() to construct. 
- private BigDecimalMsg(com.google.protobuf.GeneratedMessage.Builder builder) { + private BigDecimalMsg(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private BigDecimalMsg(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final BigDecimalMsg defaultInstance; - public static BigDecimalMsg getDefaultInstance() { - return defaultInstance; - } - - public BigDecimalMsg getDefaultInstanceForType() { - return defaultInstance; + private BigDecimalMsg() { + bigdecimalMsg_ = com.google.protobuf.ByteString.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private BigDecimalMsg( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -15566,7 +16196,7 @@ public final class HBaseProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -15577,30 +16207,14 @@ public final class HBaseProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BigDecimalMsg_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BigDecimalMsg_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public BigDecimalMsg parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new BigDecimalMsg(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bytes bigdecimal_msg = 1; public static final int BIGDECIMAL_MSG_FIELD_NUMBER = 1; private com.google.protobuf.ByteString bigdecimalMsg_; /** @@ -15616,13 +16230,11 @@ public final class HBaseProtos { return bigdecimalMsg_; } - private void initFields() { - bigdecimalMsg_ = com.google.protobuf.ByteString.EMPTY; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasBigdecimalMsg()) { memoizedIsInitialized = 0; @@ -15634,16 +16246,14 @@ public final class HBaseProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, bigdecimalMsg_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ 
-15651,19 +16261,13 @@ public final class HBaseProtos { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, bigdecimalMsg_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -15679,12 +16283,10 @@ public final class HBaseProtos { result = result && getBigdecimalMsg() .equals(other.getBigdecimalMsg()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -15696,7 +16298,7 @@ public final class HBaseProtos { hash = (37 * hash) + BIGDECIMAL_MSG_FIELD_NUMBER; hash = (53 * hash) + getBigdecimalMsg().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -15724,46 +16326,57 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -15771,14 +16384,15 @@ public final class HBaseProtos { * Protobuf type {@code hbase.pb.BigDecimalMsg} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsgOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.BigDecimalMsg) + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsgOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BigDecimalMsg_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BigDecimalMsg_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -15791,18 +16405,15 @@ public final class HBaseProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); bigdecimalMsg_ = 
com.google.protobuf.ByteString.EMPTY; @@ -15810,10 +16421,6 @@ public final class HBaseProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BigDecimalMsg_descriptor; @@ -15844,6 +16451,32 @@ public final class HBaseProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg)other); @@ -15858,13 +16491,13 @@ public final class HBaseProtos { if (other.hasBigdecimalMsg()) { setBigdecimalMsg(other.getBigdecimalMsg()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasBigdecimalMsg()) { - return false; } return true; @@ -15879,7 +16512,7 @@ public final class HBaseProtos { 
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -15889,7 +16522,6 @@ public final class HBaseProtos { } private int bitField0_; - // required bytes bigdecimal_msg = 1; private com.google.protobuf.ByteString bigdecimalMsg_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes bigdecimal_msg = 1; @@ -15924,22 +16556,59 @@ public final class HBaseProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.BigDecimalMsg) } + // @@protoc_insertion_point(class_scope:hbase.pb.BigDecimalMsg) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg DEFAULT_INSTANCE; static { - defaultInstance = new BigDecimalMsg(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public BigDecimalMsg parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new BigDecimalMsg(input, 
extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.BigDecimalMsg) } - public interface UUIDOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface UUIDOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.UUID) + com.google.protobuf.MessageOrBuilder { - // required uint64 least_sig_bits = 1; /** * required uint64 least_sig_bits = 1; */ @@ -15949,7 +16618,6 @@ public final class HBaseProtos { */ long getLeastSigBits(); - // required uint64 most_sig_bits = 2; /** * required uint64 most_sig_bits = 2; */ @@ -15962,36 +16630,29 @@ public final class HBaseProtos { /** * Protobuf type {@code hbase.pb.UUID} */ - public static final class UUID extends - com.google.protobuf.GeneratedMessage - implements UUIDOrBuilder { + public static final class UUID extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.UUID) + UUIDOrBuilder { // Use UUID.newBuilder() to construct. 
- private UUID(com.google.protobuf.GeneratedMessage.Builder builder) { + private UUID(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private UUID(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final UUID defaultInstance; - public static UUID getDefaultInstance() { - return defaultInstance; } - - public UUID getDefaultInstanceForType() { - return defaultInstance; + private UUID() { + leastSigBits_ = 0L; + mostSigBits_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private UUID( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -16026,7 +16687,7 @@ public final class HBaseProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -16037,30 +16698,14 @@ public final class HBaseProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_UUID_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_UUID_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public UUID parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new UUID(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required uint64 least_sig_bits = 1; public static final int LEAST_SIG_BITS_FIELD_NUMBER = 1; private long leastSigBits_; /** @@ -16076,7 +16721,6 @@ public final class HBaseProtos { return leastSigBits_; } - // required uint64 most_sig_bits = 2; public static final int MOST_SIG_BITS_FIELD_NUMBER = 2; private long mostSigBits_; /** @@ -16092,14 +16736,11 @@ public final class HBaseProtos { return mostSigBits_; } - private void initFields() { - leastSigBits_ = 0L; - mostSigBits_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasLeastSigBits()) { memoizedIsInitialized = 0; @@ -16115,19 +16756,17 @@ public final class HBaseProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, leastSigBits_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt64(2, mostSigBits_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size 
= memoizedSize; if (size != -1) return size; size = 0; @@ -16139,19 +16778,13 @@ public final class HBaseProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(2, mostSigBits_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -16172,12 +16805,10 @@ public final class HBaseProtos { result = result && (getMostSigBits() == other.getMostSigBits()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -16187,13 +16818,15 @@ public final class HBaseProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasLeastSigBits()) { hash = (37 * hash) + LEAST_SIG_BITS_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLeastSigBits()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getLeastSigBits()); } if (hasMostSigBits()) { hash = (37 * hash) + MOST_SIG_BITS_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getMostSigBits()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getMostSigBits()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -16221,46 +16854,57 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return 
DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -16268,14 +16912,15 @@ public final class HBaseProtos { * Protobuf type {@code hbase.pb.UUID} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.UUID) + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_UUID_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_UUID_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -16288,18 +16933,15 @@ public final class HBaseProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if 
(com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); leastSigBits_ = 0L; @@ -16309,10 +16951,6 @@ public final class HBaseProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_UUID_descriptor; @@ -16347,6 +16985,32 @@ public final class HBaseProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID)other); @@ -16364,17 +17028,16 @@ public final class HBaseProtos { if (other.hasMostSigBits()) { setMostSigBits(other.getMostSigBits()); } - this.mergeUnknownFields(other.getUnknownFields()); + 
this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasLeastSigBits()) { - return false; } if (!hasMostSigBits()) { - return false; } return true; @@ -16389,7 +17052,7 @@ public final class HBaseProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -16399,7 +17062,6 @@ public final class HBaseProtos { } private int bitField0_; - // required uint64 least_sig_bits = 1; private long leastSigBits_ ; /** * required uint64 least_sig_bits = 1; @@ -16432,7 +17094,6 @@ public final class HBaseProtos { return this; } - // required uint64 most_sig_bits = 2; private long mostSigBits_ ; /** * required uint64 most_sig_bits = 2; @@ -16464,22 +17125,59 @@ public final class HBaseProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:hbase.pb.UUID) + } + + // @@protoc_insertion_point(class_scope:hbase.pb.UUID) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new 
com.google.protobuf.AbstractParser() { + public UUID parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new UUID(input, extensionRegistry); + } + }; - // @@protoc_insertion_point(builder_scope:hbase.pb.UUID) + public static com.google.protobuf.Parser parser() { + return PARSER; } - static { - defaultInstance = new UUID(true); - defaultInstance.initFields(); + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.UUID) } - public interface NamespaceDescriptorOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface NamespaceDescriptorOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.NamespaceDescriptor) + com.google.protobuf.MessageOrBuilder { - // required bytes name = 1; /** * required bytes name = 1; */ @@ -16489,7 +17187,6 @@ public final class HBaseProtos { */ com.google.protobuf.ByteString getName(); - // repeated .hbase.pb.NameStringPair configuration = 2; /** * repeated .hbase.pb.NameStringPair configuration = 2; */ @@ -16517,36 +17214,29 @@ public final class HBaseProtos { /** * Protobuf type {@code hbase.pb.NamespaceDescriptor} */ - public static final class NamespaceDescriptor extends - com.google.protobuf.GeneratedMessage - implements NamespaceDescriptorOrBuilder { + public static final class NamespaceDescriptor extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.NamespaceDescriptor) + NamespaceDescriptorOrBuilder { // Use NamespaceDescriptor.newBuilder() to construct. 
- private NamespaceDescriptor(com.google.protobuf.GeneratedMessage.Builder builder) { + private NamespaceDescriptor(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private NamespaceDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final NamespaceDescriptor defaultInstance; - public static NamespaceDescriptor getDefaultInstance() { - return defaultInstance; - } - - public NamespaceDescriptor getDefaultInstanceForType() { - return defaultInstance; + private NamespaceDescriptor() { + name_ = com.google.protobuf.ByteString.EMPTY; + configuration_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private NamespaceDescriptor( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -16575,7 +17265,8 @@ public final class HBaseProtos { configuration_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } - configuration_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry)); + configuration_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry)); break; } } @@ -16584,7 +17275,7 @@ public final class HBaseProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + 
e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { configuration_ = java.util.Collections.unmodifiableList(configuration_); @@ -16598,30 +17289,14 @@ public final class HBaseProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NamespaceDescriptor_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NamespaceDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public NamespaceDescriptor parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new NamespaceDescriptor(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bytes name = 1; public static final int NAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString name_; /** @@ -16637,7 +17312,6 @@ public final class HBaseProtos { return name_; } - // repeated .hbase.pb.NameStringPair configuration = 2; public static final int CONFIGURATION_FIELD_NUMBER = 2; private java.util.List configuration_; /** @@ -16673,14 +17347,11 @@ public final class HBaseProtos { return configuration_.get(index); } - private void initFields() { - name_ = com.google.protobuf.ByteString.EMPTY; - configuration_ = java.util.Collections.emptyList(); - } private 
byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasName()) { memoizedIsInitialized = 0; @@ -16698,19 +17369,17 @@ public final class HBaseProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, name_); } for (int i = 0; i < configuration_.size(); i++) { output.writeMessage(2, configuration_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -16722,19 +17391,13 @@ public final class HBaseProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, configuration_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -16752,12 +17415,10 @@ public final class HBaseProtos { } result = result && getConfigurationList() .equals(other.getConfigurationList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -16773,7 +17434,7 @@ public final class HBaseProtos { hash = (37 * 
hash) + CONFIGURATION_FIELD_NUMBER; hash = (53 * hash) + getConfigurationList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -16801,46 +17462,57 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } 
public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -16848,14 +17520,15 @@ public final class HBaseProtos { * Protobuf type {@code hbase.pb.NamespaceDescriptor} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.NamespaceDescriptor) + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NamespaceDescriptor_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NamespaceDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -16868,19 +17541,16 @@ public final class HBaseProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getConfigurationFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); name_ = com.google.protobuf.ByteString.EMPTY; @@ -16894,10 +17564,6 @@ public final class HBaseProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NamespaceDescriptor_descriptor; @@ -16937,6 +17603,32 @@ public final class HBaseProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) 
super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor)other); @@ -16970,25 +17662,24 @@ public final class HBaseProtos { configuration_ = other.configuration_; bitField0_ = (bitField0_ & ~0x00000002); configurationBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getConfigurationFieldBuilder() : null; } else { configurationBuilder_.addAllMessages(other.configuration_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasName()) { - return false; } for (int i = 0; i < getConfigurationCount(); i++) { if (!getConfiguration(i).isInitialized()) { - return false; } } @@ -17004,7 +17695,7 @@ public final class HBaseProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -17014,7 +17705,6 @@ public final class HBaseProtos { } private int bitField0_; - // required bytes name = 1; private com.google.protobuf.ByteString name_ = 
com.google.protobuf.ByteString.EMPTY; /** * required bytes name = 1; @@ -17050,7 +17740,6 @@ public final class HBaseProtos { return this; } - // repeated .hbase.pb.NameStringPair configuration = 2; private java.util.List configuration_ = java.util.Collections.emptyList(); private void ensureConfigurationIsMutable() { @@ -17060,7 +17749,7 @@ public final class HBaseProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> configurationBuilder_; /** @@ -17192,7 +17881,8 @@ public final class HBaseProtos { java.lang.Iterable values) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); - super.addAll(values, configuration_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, configuration_); onChanged(); } else { configurationBuilder_.addAllMessages(values); @@ -17275,11 +17965,11 @@ public final class HBaseProtos { getConfigurationBuilderList() { return getConfigurationFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationFieldBuilder() { if (configurationBuilder_ == null) { - configurationBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + configurationBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>( configuration_, ((bitField0_ & 0x00000002) == 0x00000002), @@ -17289,22 +17979,59 @@ public final class HBaseProtos { } return configurationBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.NamespaceDescriptor) } + // @@protoc_insertion_point(class_scope:hbase.pb.NamespaceDescriptor) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor DEFAULT_INSTANCE; static { - defaultInstance = new NamespaceDescriptor(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public NamespaceDescriptor parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new NamespaceDescriptor(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // 
@@protoc_insertion_point(class_scope:hbase.pb.NamespaceDescriptor) } - public interface VersionInfoOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface VersionInfoOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.VersionInfo) + com.google.protobuf.MessageOrBuilder { - // required string version = 1; /** * required string version = 1; */ @@ -17319,7 +18046,6 @@ public final class HBaseProtos { com.google.protobuf.ByteString getVersionBytes(); - // required string url = 2; /** * required string url = 2; */ @@ -17334,7 +18060,6 @@ public final class HBaseProtos { com.google.protobuf.ByteString getUrlBytes(); - // required string revision = 3; /** * required string revision = 3; */ @@ -17349,7 +18074,6 @@ public final class HBaseProtos { com.google.protobuf.ByteString getRevisionBytes(); - // required string user = 4; /** * required string user = 4; */ @@ -17364,7 +18088,6 @@ public final class HBaseProtos { com.google.protobuf.ByteString getUserBytes(); - // required string date = 5; /** * required string date = 5; */ @@ -17379,7 +18102,6 @@ public final class HBaseProtos { com.google.protobuf.ByteString getDateBytes(); - // required string src_checksum = 6; /** * required string src_checksum = 6; */ @@ -17394,7 +18116,6 @@ public final class HBaseProtos { com.google.protobuf.ByteString getSrcChecksumBytes(); - // optional uint32 version_major = 7; /** * optional uint32 version_major = 7; */ @@ -17404,7 +18125,6 @@ public final class HBaseProtos { */ int getVersionMajor(); - // optional uint32 version_minor = 8; /** * optional uint32 version_minor = 8; */ @@ -17415,42 +18135,41 @@ public final class HBaseProtos { int getVersionMinor(); } /** - * Protobuf type {@code hbase.pb.VersionInfo} - * *
    * Rpc client version info proto. Included in ConnectionHeader on connection setup
    * 
+ * + * Protobuf type {@code hbase.pb.VersionInfo} */ - public static final class VersionInfo extends - com.google.protobuf.GeneratedMessage - implements VersionInfoOrBuilder { + public static final class VersionInfo extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.VersionInfo) + VersionInfoOrBuilder { // Use VersionInfo.newBuilder() to construct. - private VersionInfo(com.google.protobuf.GeneratedMessage.Builder builder) { + private VersionInfo(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private VersionInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final VersionInfo defaultInstance; - public static VersionInfo getDefaultInstance() { - return defaultInstance; } - - public VersionInfo getDefaultInstanceForType() { - return defaultInstance; + private VersionInfo() { + version_ = ""; + url_ = ""; + revision_ = ""; + user_ = ""; + date_ = ""; + srcChecksum_ = ""; + versionMajor_ = 0; + versionMinor_ = 0; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private VersionInfo( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -17470,33 +18189,39 @@ public final class HBaseProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - version_ = input.readBytes(); + version_ = bs; break; } case 18: { + com.google.protobuf.ByteString bs = input.readBytes(); 
bitField0_ |= 0x00000002; - url_ = input.readBytes(); + url_ = bs; break; } case 26: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000004; - revision_ = input.readBytes(); + revision_ = bs; break; } case 34: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000008; - user_ = input.readBytes(); + user_ = bs; break; } case 42: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000010; - date_ = input.readBytes(); + date_ = bs; break; } case 50: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000020; - srcChecksum_ = input.readBytes(); + srcChecksum_ = bs; break; } case 56: { @@ -17515,7 +18240,7 @@ public final class HBaseProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -17526,32 +18251,16 @@ public final class HBaseProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_VersionInfo_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_VersionInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public VersionInfo parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return new VersionInfo(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string version = 1; public static final int VERSION_FIELD_NUMBER = 1; - private java.lang.Object version_; + private volatile java.lang.Object version_; /** * required string version = 1; */ @@ -17592,9 +18301,8 @@ public final class HBaseProtos { } } - // required string url = 2; public static final int URL_FIELD_NUMBER = 2; - private java.lang.Object url_; + private volatile java.lang.Object url_; /** * required string url = 2; */ @@ -17635,9 +18343,8 @@ public final class HBaseProtos { } } - // required string revision = 3; public static final int REVISION_FIELD_NUMBER = 3; - private java.lang.Object revision_; + private volatile java.lang.Object revision_; /** * required string revision = 3; */ @@ -17678,9 +18385,8 @@ public final class HBaseProtos { } } - // required string user = 4; public static final int USER_FIELD_NUMBER = 4; - private java.lang.Object user_; + private volatile java.lang.Object user_; /** * required string user = 4; */ @@ -17721,9 +18427,8 @@ public final class HBaseProtos { } } - // required string date = 5; public static final int DATE_FIELD_NUMBER = 5; - private java.lang.Object date_; + private volatile java.lang.Object date_; /** * required string date = 5; */ @@ -17764,9 +18469,8 @@ public final class HBaseProtos { } } - // required string src_checksum = 6; public static final int SRC_CHECKSUM_FIELD_NUMBER = 6; - private java.lang.Object srcChecksum_; + private volatile java.lang.Object srcChecksum_; /** * required string src_checksum = 6; */ @@ -17807,7 +18511,6 @@ public final class HBaseProtos { } } - // optional uint32 version_major = 7; public static final int VERSION_MAJOR_FIELD_NUMBER = 7; private int versionMajor_; /** @@ -17823,7 +18526,6 @@ public final class HBaseProtos { 
return versionMajor_; } - // optional uint32 version_minor = 8; public static final int VERSION_MINOR_FIELD_NUMBER = 8; private int versionMinor_; /** @@ -17839,20 +18541,11 @@ public final class HBaseProtos { return versionMinor_; } - private void initFields() { - version_ = ""; - url_ = ""; - revision_ = ""; - user_ = ""; - date_ = ""; - srcChecksum_ = ""; - versionMajor_ = 0; - versionMinor_ = 0; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasVersion()) { memoizedIsInitialized = 0; @@ -17884,24 +18577,23 @@ public final class HBaseProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getVersionBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, version_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getUrlBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, url_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, getRevisionBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, revision_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeBytes(4, getUserBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, user_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeBytes(5, getDateBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, date_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { - output.writeBytes(6, getSrcChecksumBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 6, srcChecksum_); } if (((bitField0_ & 0x00000040) == 0x00000040)) { output.writeUInt32(7, versionMajor_); @@ -17909,38 
+18601,31 @@ public final class HBaseProtos { if (((bitField0_ & 0x00000080) == 0x00000080)) { output.writeUInt32(8, versionMinor_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getVersionBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, version_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getUrlBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, url_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, getRevisionBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, revision_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(4, getUserBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, user_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(5, getDateBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, date_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(6, getSrcChecksumBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, srcChecksum_); } if (((bitField0_ & 0x00000040) == 0x00000040)) { size += com.google.protobuf.CodedOutputStream @@ -17950,19 +18635,13 @@ public final class HBaseProtos { size += com.google.protobuf.CodedOutputStream .computeUInt32Size(8, versionMinor_); } - size += 
getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -18013,12 +18692,10 @@ public final class HBaseProtos { result = result && (getVersionMinor() == other.getVersionMinor()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -18058,7 +18735,7 @@ public final class HBaseProtos { hash = (37 * hash) + VERSION_MINOR_FIELD_NUMBER; hash = (53 * hash) + getVersionMinor(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -18086,65 +18763,77 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo parseDelimitedFrom(java.io.InputStream input) throws 
java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.VersionInfo} - * *
      * Rpc client version info proto. Included in ConnectionHeader on connection setup
      * 
+ * + * Protobuf type {@code hbase.pb.VersionInfo} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.VersionInfo) + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_VersionInfo_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_VersionInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -18157,18 +18846,15 @@ public final class HBaseProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); version_ = ""; @@ -18190,10 +18876,6 @@ public final class HBaseProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_VersionInfo_descriptor; @@ -18252,6 +18934,32 @@ public final class HBaseProtos { return result; } + public 
Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo)other); @@ -18299,33 +19007,28 @@ public final class HBaseProtos { if (other.hasVersionMinor()) { setVersionMinor(other.getVersionMinor()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasVersion()) { - return false; } if (!hasUrl()) { - return false; } if (!hasRevision()) { - return false; } if (!hasUser()) { - return false; } if (!hasDate()) { - return false; } if (!hasSrcChecksum()) { - return false; } return true; @@ -18340,7 +19043,7 @@ public final class HBaseProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } 
finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -18350,7 +19053,6 @@ public final class HBaseProtos { } private int bitField0_; - // required string version = 1; private java.lang.Object version_ = ""; /** * required string version = 1; @@ -18364,9 +19066,12 @@ public final class HBaseProtos { public java.lang.String getVersion() { java.lang.Object ref = version_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - version_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + version_ = s; + } return s; } else { return (java.lang.String) ref; @@ -18424,7 +19129,6 @@ public final class HBaseProtos { return this; } - // required string url = 2; private java.lang.Object url_ = ""; /** * required string url = 2; @@ -18438,9 +19142,12 @@ public final class HBaseProtos { public java.lang.String getUrl() { java.lang.Object ref = url_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - url_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + url_ = s; + } return s; } else { return (java.lang.String) ref; @@ -18498,7 +19205,6 @@ public final class HBaseProtos { return this; } - // required string revision = 3; private java.lang.Object revision_ = ""; /** * required string revision = 3; @@ -18512,9 +19218,12 @@ public final class HBaseProtos { public java.lang.String getRevision() { java.lang.Object ref = revision_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - revision_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + revision_ = s; 
+ } return s; } else { return (java.lang.String) ref; @@ -18572,7 +19281,6 @@ public final class HBaseProtos { return this; } - // required string user = 4; private java.lang.Object user_ = ""; /** * required string user = 4; @@ -18586,9 +19294,12 @@ public final class HBaseProtos { public java.lang.String getUser() { java.lang.Object ref = user_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - user_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + user_ = s; + } return s; } else { return (java.lang.String) ref; @@ -18646,7 +19357,6 @@ public final class HBaseProtos { return this; } - // required string date = 5; private java.lang.Object date_ = ""; /** * required string date = 5; @@ -18660,9 +19370,12 @@ public final class HBaseProtos { public java.lang.String getDate() { java.lang.Object ref = date_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - date_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + date_ = s; + } return s; } else { return (java.lang.String) ref; @@ -18720,7 +19433,6 @@ public final class HBaseProtos { return this; } - // required string src_checksum = 6; private java.lang.Object srcChecksum_ = ""; /** * required string src_checksum = 6; @@ -18734,9 +19446,12 @@ public final class HBaseProtos { public java.lang.String getSrcChecksum() { java.lang.Object ref = srcChecksum_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - srcChecksum_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + srcChecksum_ = s; + } return s; 
} else { return (java.lang.String) ref; @@ -18794,7 +19509,6 @@ public final class HBaseProtos { return this; } - // optional uint32 version_major = 7; private int versionMajor_ ; /** * optional uint32 version_major = 7; @@ -18827,7 +19541,6 @@ public final class HBaseProtos { return this; } - // optional uint32 version_minor = 8; private int versionMinor_ ; /** * optional uint32 version_minor = 8; @@ -18859,22 +19572,59 @@ public final class HBaseProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.VersionInfo) } + // @@protoc_insertion_point(class_scope:hbase.pb.VersionInfo) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo DEFAULT_INSTANCE; static { - defaultInstance = new VersionInfo(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public VersionInfo parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new VersionInfo(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.VersionInfo) } - public interface RegionServerInfoOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RegionServerInfoOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RegionServerInfo) + com.google.protobuf.MessageOrBuilder { - // optional int32 infoPort = 1; /** * optional int32 infoPort = 1; */ @@ -18884,7 +19634,6 @@ public final class HBaseProtos { */ int getInfoPort(); - // optional .hbase.pb.VersionInfo version_info = 2; /** * optional .hbase.pb.VersionInfo version_info = 2; */ @@ -18899,43 +19648,35 @@ public final class HBaseProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder getVersionInfoOrBuilder(); } /** - * Protobuf type {@code hbase.pb.RegionServerInfo} - * *
    **
    * Description of the region server info
    * 
+ * + * Protobuf type {@code hbase.pb.RegionServerInfo} */ - public static final class RegionServerInfo extends - com.google.protobuf.GeneratedMessage - implements RegionServerInfoOrBuilder { + public static final class RegionServerInfo extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RegionServerInfo) + RegionServerInfoOrBuilder { // Use RegionServerInfo.newBuilder() to construct. - private RegionServerInfo(com.google.protobuf.GeneratedMessage.Builder builder) { + private RegionServerInfo(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private RegionServerInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RegionServerInfo defaultInstance; - public static RegionServerInfo getDefaultInstance() { - return defaultInstance; - } - - public RegionServerInfo getDefaultInstanceForType() { - return defaultInstance; + private RegionServerInfo() { + infoPort_ = 0; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RegionServerInfo( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -18978,7 +19719,7 @@ public final class HBaseProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); 
makeExtensionsImmutable(); @@ -18989,30 +19730,14 @@ public final class HBaseProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionServerInfo_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionServerInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RegionServerInfo parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RegionServerInfo(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional int32 infoPort = 1; public static final int INFOPORT_FIELD_NUMBER = 1; private int infoPort_; /** @@ -19028,7 +19753,6 @@ public final class HBaseProtos { return infoPort_; } - // optional .hbase.pb.VersionInfo version_info = 2; public static final int VERSION_INFO_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo versionInfo_; /** @@ -19041,23 +19765,20 @@ public final class HBaseProtos { * optional .hbase.pb.VersionInfo version_info = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo getVersionInfo() { - return versionInfo_; + return versionInfo_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance() : versionInfo_; } /** * optional .hbase.pb.VersionInfo version_info = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder getVersionInfoOrBuilder() { - return versionInfo_; + return versionInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance() : versionInfo_; } - private void initFields() { - infoPort_ = 0; - versionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (hasVersionInfo()) { if (!getVersionInfo().isInitialized()) { @@ -19071,19 +19792,17 @@ public final class HBaseProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeInt32(1, infoPort_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, versionInfo_); + output.writeMessage(2, getVersionInfo()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -19093,21 +19812,15 @@ public final class HBaseProtos { } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, versionInfo_); + .computeMessageSize(2, getVersionInfo()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } 
private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -19128,12 +19841,10 @@ public final class HBaseProtos { result = result && getVersionInfo() .equals(other.getVersionInfo()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -19149,7 +19860,7 @@ public final class HBaseProtos { hash = (37 * hash) + VERSION_INFO_FIELD_NUMBER; hash = (53 * hash) + getVersionInfo().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -19177,66 +19888,78 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + 
.parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.RegionServerInfo} - * *
      **
      * Description of the region server info
      * 
+ * + * Protobuf type {@code hbase.pb.RegionServerInfo} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfoOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RegionServerInfo) + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionServerInfo_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionServerInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -19249,25 +19972,22 @@ public final class HBaseProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getVersionInfoFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); infoPort_ = 0; bitField0_ = (bitField0_ & ~0x00000001); if (versionInfoBuilder_ == null) { - versionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance(); + versionInfo_ = null; } else { versionInfoBuilder_.clear(); } @@ -19275,10 +19995,6 @@ public final class HBaseProtos { return this; } - public Builder clone() { - return 
create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionServerInfo_descriptor; @@ -19317,6 +20033,32 @@ public final class HBaseProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo)other); @@ -19334,14 +20076,14 @@ public final class HBaseProtos { if (other.hasVersionInfo()) { mergeVersionInfo(other.getVersionInfo()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (hasVersionInfo()) { if (!getVersionInfo().isInitialized()) { - return false; } } @@ -19357,7 +20099,7 @@ public final class HBaseProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -19367,7 +20109,6 @@ public final class HBaseProtos { } private int bitField0_; - // optional int32 infoPort = 1; private int infoPort_ ; /** * optional int32 infoPort = 1; @@ -19400,9 +20141,8 @@ public final class HBaseProtos { return this; } - // optional .hbase.pb.VersionInfo version_info = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo versionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo versionInfo_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder> versionInfoBuilder_; /** * optional .hbase.pb.VersionInfo version_info = 2; @@ -19415,7 +20155,7 @@ public final class HBaseProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo getVersionInfo() { if (versionInfoBuilder_ == null) { - return versionInfo_; + return versionInfo_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance() : versionInfo_; } else { return versionInfoBuilder_.getMessage(); } @@ -19456,6 +20196,7 @@ public final class HBaseProtos { public Builder mergeVersionInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo value) { if (versionInfoBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + versionInfo_ != null && versionInfo_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance()) { versionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.newBuilder(versionInfo_).mergeFrom(value).buildPartial(); @@ -19474,7 +20215,7 @@ public final class HBaseProtos { */ public Builder clearVersionInfo() { if (versionInfoBuilder_ == null) { - versionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance(); + versionInfo_ = null; onChanged(); } else { versionInfoBuilder_.clear(); @@ -19497,168 +20238,206 @@ public final class HBaseProtos { if (versionInfoBuilder_ != null) { return versionInfoBuilder_.getMessageOrBuilder(); } else { - return versionInfo_; + return versionInfo_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance() : versionInfo_; } } /** * optional .hbase.pb.VersionInfo version_info = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder> getVersionInfoFieldBuilder() { if (versionInfoBuilder_ == null) { - versionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + versionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder>( - versionInfo_, + getVersionInfo(), getParentForChildren(), isClean()); versionInfo_ = null; } return versionInfoBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RegionServerInfo) } + // @@protoc_insertion_point(class_scope:hbase.pb.RegionServerInfo) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo DEFAULT_INSTANCE; static { - defaultInstance = new RegionServerInfo(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo getDefaultInstance() { + return 
DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RegionServerInfo parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionServerInfo(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.RegionServerInfo) } - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_TableName_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_TableName_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_TableSchema_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_TableSchema_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_TableState_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_TableState_fieldAccessorTable; - 
private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ColumnFamilySchema_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ColumnFamilySchema_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RegionInfo_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RegionInfo_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FavoredNodes_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_FavoredNodes_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RegionSpecifier_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RegionSpecifier_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_TimeRange_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_TimeRange_fieldAccessorTable; - private static 
com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ColumnFamilyTimeRange_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ColumnFamilyTimeRange_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ServerName_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ServerName_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_Coprocessor_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_Coprocessor_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_NameStringPair_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_NameStringPair_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_NameBytesPair_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_NameBytesPair_fieldAccessorTable; - private static 
com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_BytesBytesPair_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_BytesBytesPair_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_NameInt64Pair_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_NameInt64Pair_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SnapshotDescription_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ProcedureDescription_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ProcedureDescription_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_EmptyMsg_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_EmptyMsg_fieldAccessorTable; - private static 
com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_LongMsg_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_LongMsg_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_DoubleMsg_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_DoubleMsg_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_BigDecimalMsg_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_BigDecimalMsg_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_UUID_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_UUID_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_NamespaceDescriptor_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_NamespaceDescriptor_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private 
static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_VersionInfo_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_VersionInfo_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RegionServerInfo_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RegionServerInfo_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { @@ -19727,167 +20506,167 @@ public final class HBaseProtos { "H\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_hbase_pb_TableName_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_hbase_pb_TableName_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_TableName_descriptor, - new java.lang.String[] { "Namespace", "Qualifier", }); - internal_static_hbase_pb_TableSchema_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_hbase_pb_TableSchema_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_TableSchema_descriptor, - 
new java.lang.String[] { "TableName", "Attributes", "ColumnFamilies", "Configuration", }); - internal_static_hbase_pb_TableState_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_hbase_pb_TableState_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_TableState_descriptor, - new java.lang.String[] { "State", }); - internal_static_hbase_pb_ColumnFamilySchema_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_hbase_pb_ColumnFamilySchema_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ColumnFamilySchema_descriptor, - new java.lang.String[] { "Name", "Attributes", "Configuration", }); - internal_static_hbase_pb_RegionInfo_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_hbase_pb_RegionInfo_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RegionInfo_descriptor, - new java.lang.String[] { "RegionId", "TableName", "StartKey", "EndKey", "Offline", "Split", "ReplicaId", }); - internal_static_hbase_pb_FavoredNodes_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_hbase_pb_FavoredNodes_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_FavoredNodes_descriptor, - new java.lang.String[] { "FavoredNode", }); - internal_static_hbase_pb_RegionSpecifier_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_hbase_pb_RegionSpecifier_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RegionSpecifier_descriptor, - new java.lang.String[] { "Type", "Value", }); - internal_static_hbase_pb_TimeRange_descriptor = - getDescriptor().getMessageTypes().get(7); - internal_static_hbase_pb_TimeRange_fieldAccessorTable = new - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_TimeRange_descriptor, - new java.lang.String[] { "From", "To", }); - internal_static_hbase_pb_ColumnFamilyTimeRange_descriptor = - getDescriptor().getMessageTypes().get(8); - internal_static_hbase_pb_ColumnFamilyTimeRange_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ColumnFamilyTimeRange_descriptor, - new java.lang.String[] { "ColumnFamily", "TimeRange", }); - internal_static_hbase_pb_ServerName_descriptor = - getDescriptor().getMessageTypes().get(9); - internal_static_hbase_pb_ServerName_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ServerName_descriptor, - new java.lang.String[] { "HostName", "Port", "StartCode", }); - internal_static_hbase_pb_Coprocessor_descriptor = - getDescriptor().getMessageTypes().get(10); - internal_static_hbase_pb_Coprocessor_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_Coprocessor_descriptor, - new java.lang.String[] { "Name", }); - internal_static_hbase_pb_NameStringPair_descriptor = - getDescriptor().getMessageTypes().get(11); - internal_static_hbase_pb_NameStringPair_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_NameStringPair_descriptor, - new java.lang.String[] { "Name", "Value", }); - internal_static_hbase_pb_NameBytesPair_descriptor = - getDescriptor().getMessageTypes().get(12); - internal_static_hbase_pb_NameBytesPair_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_NameBytesPair_descriptor, - new java.lang.String[] { "Name", "Value", }); - internal_static_hbase_pb_BytesBytesPair_descriptor = - getDescriptor().getMessageTypes().get(13); - internal_static_hbase_pb_BytesBytesPair_fieldAccessorTable = new - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_BytesBytesPair_descriptor, - new java.lang.String[] { "First", "Second", }); - internal_static_hbase_pb_NameInt64Pair_descriptor = - getDescriptor().getMessageTypes().get(14); - internal_static_hbase_pb_NameInt64Pair_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_NameInt64Pair_descriptor, - new java.lang.String[] { "Name", "Value", }); - internal_static_hbase_pb_SnapshotDescription_descriptor = - getDescriptor().getMessageTypes().get(15); - internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_SnapshotDescription_descriptor, - new java.lang.String[] { "Name", "Table", "CreationTime", "Type", "Version", "Owner", }); - internal_static_hbase_pb_ProcedureDescription_descriptor = - getDescriptor().getMessageTypes().get(16); - internal_static_hbase_pb_ProcedureDescription_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ProcedureDescription_descriptor, - new java.lang.String[] { "Signature", "Instance", "CreationTime", "Configuration", }); - internal_static_hbase_pb_EmptyMsg_descriptor = - getDescriptor().getMessageTypes().get(17); - internal_static_hbase_pb_EmptyMsg_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_EmptyMsg_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_LongMsg_descriptor = - getDescriptor().getMessageTypes().get(18); - internal_static_hbase_pb_LongMsg_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_LongMsg_descriptor, - new java.lang.String[] { "LongMsg", }); - internal_static_hbase_pb_DoubleMsg_descriptor = - getDescriptor().getMessageTypes().get(19); - 
internal_static_hbase_pb_DoubleMsg_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_DoubleMsg_descriptor, - new java.lang.String[] { "DoubleMsg", }); - internal_static_hbase_pb_BigDecimalMsg_descriptor = - getDescriptor().getMessageTypes().get(20); - internal_static_hbase_pb_BigDecimalMsg_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_BigDecimalMsg_descriptor, - new java.lang.String[] { "BigdecimalMsg", }); - internal_static_hbase_pb_UUID_descriptor = - getDescriptor().getMessageTypes().get(21); - internal_static_hbase_pb_UUID_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_UUID_descriptor, - new java.lang.String[] { "LeastSigBits", "MostSigBits", }); - internal_static_hbase_pb_NamespaceDescriptor_descriptor = - getDescriptor().getMessageTypes().get(22); - internal_static_hbase_pb_NamespaceDescriptor_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_NamespaceDescriptor_descriptor, - new java.lang.String[] { "Name", "Configuration", }); - internal_static_hbase_pb_VersionInfo_descriptor = - getDescriptor().getMessageTypes().get(23); - internal_static_hbase_pb_VersionInfo_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_VersionInfo_descriptor, - new java.lang.String[] { "Version", "Url", "Revision", "User", "Date", "SrcChecksum", "VersionMajor", "VersionMinor", }); - internal_static_hbase_pb_RegionServerInfo_descriptor = - getDescriptor().getMessageTypes().get(24); - internal_static_hbase_pb_RegionServerInfo_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RegionServerInfo_descriptor, - new java.lang.String[] { "InfoPort", "VersionInfo", }); - return null; - } - }; + new 
com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); + internal_static_hbase_pb_TableName_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_hbase_pb_TableName_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_TableName_descriptor, + new java.lang.String[] { "Namespace", "Qualifier", }); + internal_static_hbase_pb_TableSchema_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_hbase_pb_TableSchema_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_TableSchema_descriptor, + new java.lang.String[] { "TableName", "Attributes", "ColumnFamilies", "Configuration", }); + internal_static_hbase_pb_TableState_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_hbase_pb_TableState_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_TableState_descriptor, + new java.lang.String[] { "State", }); + internal_static_hbase_pb_ColumnFamilySchema_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_hbase_pb_ColumnFamilySchema_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ColumnFamilySchema_descriptor, + new java.lang.String[] { "Name", "Attributes", "Configuration", }); + internal_static_hbase_pb_RegionInfo_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_hbase_pb_RegionInfo_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + 
internal_static_hbase_pb_RegionInfo_descriptor, + new java.lang.String[] { "RegionId", "TableName", "StartKey", "EndKey", "Offline", "Split", "ReplicaId", }); + internal_static_hbase_pb_FavoredNodes_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_hbase_pb_FavoredNodes_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_FavoredNodes_descriptor, + new java.lang.String[] { "FavoredNode", }); + internal_static_hbase_pb_RegionSpecifier_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_hbase_pb_RegionSpecifier_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_RegionSpecifier_descriptor, + new java.lang.String[] { "Type", "Value", }); + internal_static_hbase_pb_TimeRange_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_hbase_pb_TimeRange_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_TimeRange_descriptor, + new java.lang.String[] { "From", "To", }); + internal_static_hbase_pb_ColumnFamilyTimeRange_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_hbase_pb_ColumnFamilyTimeRange_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ColumnFamilyTimeRange_descriptor, + new java.lang.String[] { "ColumnFamily", "TimeRange", }); + internal_static_hbase_pb_ServerName_descriptor = + getDescriptor().getMessageTypes().get(9); + internal_static_hbase_pb_ServerName_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ServerName_descriptor, + new java.lang.String[] { "HostName", "Port", "StartCode", }); + internal_static_hbase_pb_Coprocessor_descriptor = + getDescriptor().getMessageTypes().get(10); + internal_static_hbase_pb_Coprocessor_fieldAccessorTable = new + 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_Coprocessor_descriptor, + new java.lang.String[] { "Name", }); + internal_static_hbase_pb_NameStringPair_descriptor = + getDescriptor().getMessageTypes().get(11); + internal_static_hbase_pb_NameStringPair_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_NameStringPair_descriptor, + new java.lang.String[] { "Name", "Value", }); + internal_static_hbase_pb_NameBytesPair_descriptor = + getDescriptor().getMessageTypes().get(12); + internal_static_hbase_pb_NameBytesPair_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_NameBytesPair_descriptor, + new java.lang.String[] { "Name", "Value", }); + internal_static_hbase_pb_BytesBytesPair_descriptor = + getDescriptor().getMessageTypes().get(13); + internal_static_hbase_pb_BytesBytesPair_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_BytesBytesPair_descriptor, + new java.lang.String[] { "First", "Second", }); + internal_static_hbase_pb_NameInt64Pair_descriptor = + getDescriptor().getMessageTypes().get(14); + internal_static_hbase_pb_NameInt64Pair_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_NameInt64Pair_descriptor, + new java.lang.String[] { "Name", "Value", }); + internal_static_hbase_pb_SnapshotDescription_descriptor = + getDescriptor().getMessageTypes().get(15); + internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_SnapshotDescription_descriptor, + new java.lang.String[] { "Name", "Table", "CreationTime", "Type", "Version", "Owner", }); + internal_static_hbase_pb_ProcedureDescription_descriptor = + getDescriptor().getMessageTypes().get(16); + 
internal_static_hbase_pb_ProcedureDescription_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ProcedureDescription_descriptor, + new java.lang.String[] { "Signature", "Instance", "CreationTime", "Configuration", }); + internal_static_hbase_pb_EmptyMsg_descriptor = + getDescriptor().getMessageTypes().get(17); + internal_static_hbase_pb_EmptyMsg_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_EmptyMsg_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_LongMsg_descriptor = + getDescriptor().getMessageTypes().get(18); + internal_static_hbase_pb_LongMsg_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_LongMsg_descriptor, + new java.lang.String[] { "LongMsg", }); + internal_static_hbase_pb_DoubleMsg_descriptor = + getDescriptor().getMessageTypes().get(19); + internal_static_hbase_pb_DoubleMsg_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_DoubleMsg_descriptor, + new java.lang.String[] { "DoubleMsg", }); + internal_static_hbase_pb_BigDecimalMsg_descriptor = + getDescriptor().getMessageTypes().get(20); + internal_static_hbase_pb_BigDecimalMsg_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_BigDecimalMsg_descriptor, + new java.lang.String[] { "BigdecimalMsg", }); + internal_static_hbase_pb_UUID_descriptor = + getDescriptor().getMessageTypes().get(21); + internal_static_hbase_pb_UUID_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_UUID_descriptor, + new java.lang.String[] { "LeastSigBits", "MostSigBits", }); + internal_static_hbase_pb_NamespaceDescriptor_descriptor = + getDescriptor().getMessageTypes().get(22); + 
internal_static_hbase_pb_NamespaceDescriptor_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_NamespaceDescriptor_descriptor, + new java.lang.String[] { "Name", "Configuration", }); + internal_static_hbase_pb_VersionInfo_descriptor = + getDescriptor().getMessageTypes().get(23); + internal_static_hbase_pb_VersionInfo_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_VersionInfo_descriptor, + new java.lang.String[] { "Version", "Url", "Revision", "User", "Date", "SrcChecksum", "VersionMajor", "VersionMinor", }); + internal_static_hbase_pb_RegionServerInfo_descriptor = + getDescriptor().getMessageTypes().get(24); + internal_static_hbase_pb_RegionServerInfo_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_RegionServerInfo_descriptor, + new java.lang.String[] { "InfoPort", "VersionInfo", }); } // @@protoc_insertion_point(outer_class_scope) diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HFileProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HFileProtos.java index 99dbd72..2a3d649 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HFileProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HFileProtos.java @@ -6,12 +6,18 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated; public final class HFileProtos { private HFileProtos() {} public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); } - public interface FileInfoProtoOrBuilder - extends 
com.google.protobuf.MessageOrBuilder { + public interface FileInfoProtoOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.FileInfoProto) + com.google.protobuf.MessageOrBuilder { - // repeated .hbase.pb.BytesBytesPair map_entry = 1; /** * repeated .hbase.pb.BytesBytesPair map_entry = 1; */ @@ -37,42 +43,34 @@ public final class HFileProtos { int index); } /** - * Protobuf type {@code hbase.pb.FileInfoProto} - * *
    * Map of name/values
    * 
+ * + * Protobuf type {@code hbase.pb.FileInfoProto} */ - public static final class FileInfoProto extends - com.google.protobuf.GeneratedMessage - implements FileInfoProtoOrBuilder { + public static final class FileInfoProto extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.FileInfoProto) + FileInfoProtoOrBuilder { // Use FileInfoProto.newBuilder() to construct. - private FileInfoProto(com.google.protobuf.GeneratedMessage.Builder builder) { + private FileInfoProto(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private FileInfoProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final FileInfoProto defaultInstance; - public static FileInfoProto getDefaultInstance() { - return defaultInstance; } - - public FileInfoProto getDefaultInstanceForType() { - return defaultInstance; + private FileInfoProto() { + mapEntry_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private FileInfoProto( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -96,7 +94,8 @@ public final class HFileProtos { mapEntry_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } - mapEntry_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry)); + mapEntry_.add( + 
input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry)); break; } } @@ -105,7 +104,7 @@ public final class HFileProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { mapEntry_ = java.util.Collections.unmodifiableList(mapEntry_); @@ -119,29 +118,13 @@ public final class HFileProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileInfoProto_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileInfoProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public FileInfoProto parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new FileInfoProto(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - // repeated .hbase.pb.BytesBytesPair map_entry = 1; public static final int MAP_ENTRY_FIELD_NUMBER = 1; private java.util.List mapEntry_; /** @@ -177,13 +160,11 @@ public final class HFileProtos { return mapEntry_.get(index); } - private void initFields() { - mapEntry_ = 
java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; for (int i = 0; i < getMapEntryCount(); i++) { if (!getMapEntry(i).isInitialized()) { @@ -197,16 +178,14 @@ public final class HFileProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < mapEntry_.size(); i++) { output.writeMessage(1, mapEntry_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -214,19 +193,13 @@ public final class HFileProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, mapEntry_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -239,12 +212,10 @@ public final class HFileProtos { boolean result = true; result = result && getMapEntryList() .equals(other.getMapEntryList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -256,7 +227,7 @@ public final class HFileProtos { hash = (37 * hash) + 
MAP_ENTRY_FIELD_NUMBER; hash = (53 * hash) + getMapEntryList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -284,65 +255,77 @@ public final class HFileProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.FileInfoProto} - * *
      * Map of name/values
      * 
+ * + * Protobuf type {@code hbase.pb.FileInfoProto} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProtoOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.FileInfoProto) + org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProtoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileInfoProto_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileInfoProto_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -355,19 +338,16 @@ public final class HFileProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getMapEntryFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (mapEntryBuilder_ == null) { @@ -379,10 +359,6 @@ public final class HFileProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileInfoProto_descriptor; @@ -416,6 +392,32 @@ public final class 
HFileProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto)other); @@ -446,21 +448,21 @@ public final class HFileProtos { mapEntry_ = other.mapEntry_; bitField0_ = (bitField0_ & ~0x00000001); mapEntryBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getMapEntryFieldBuilder() : null; } else { mapEntryBuilder_.addAllMessages(other.mapEntry_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getMapEntryCount(); i++) { if (!getMapEntry(i).isInitialized()) { - return false; } } @@ -476,7 +478,7 @@ public final class HFileProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -486,7 +488,6 @@ public final class HFileProtos { } private int bitField0_; - // repeated .hbase.pb.BytesBytesPair map_entry = 1; private java.util.List mapEntry_ = java.util.Collections.emptyList(); private void ensureMapEntryIsMutable() { @@ -496,7 +497,7 @@ public final class HFileProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> mapEntryBuilder_; /** @@ -628,7 +629,8 @@ public final class HFileProtos { java.lang.Iterable values) { if (mapEntryBuilder_ == null) { ensureMapEntryIsMutable(); - super.addAll(values, mapEntry_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, mapEntry_); onChanged(); } else { mapEntryBuilder_.addAllMessages(values); @@ -711,11 +713,11 @@ public final class HFileProtos { getMapEntryBuilderList() { return getMapEntryFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private 
com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getMapEntryFieldBuilder() { if (mapEntryBuilder_ == null) { - mapEntryBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + mapEntryBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>( mapEntry_, ((bitField0_ & 0x00000001) == 0x00000001), @@ -725,22 +727,59 @@ public final class HFileProtos { } return mapEntryBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.FileInfoProto) } + // @@protoc_insertion_point(class_scope:hbase.pb.FileInfoProto) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto DEFAULT_INSTANCE; static { - defaultInstance = new FileInfoProto(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public FileInfoProto parsePartialFrom( + com.google.protobuf.CodedInputStream 
input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FileInfoProto(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.FileInfoProto) } - public interface FileTrailerProtoOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface FileTrailerProtoOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.FileTrailerProto) + com.google.protobuf.MessageOrBuilder { - // optional uint64 file_info_offset = 1; /** * optional uint64 file_info_offset = 1; */ @@ -750,7 +789,6 @@ public final class HFileProtos { */ long getFileInfoOffset(); - // optional uint64 load_on_open_data_offset = 2; /** * optional uint64 load_on_open_data_offset = 2; */ @@ -760,7 +798,6 @@ public final class HFileProtos { */ long getLoadOnOpenDataOffset(); - // optional uint64 uncompressed_data_index_size = 3; /** * optional uint64 uncompressed_data_index_size = 3; */ @@ -770,7 +807,6 @@ public final class HFileProtos { */ long getUncompressedDataIndexSize(); - // optional uint64 total_uncompressed_bytes = 4; /** * optional uint64 total_uncompressed_bytes = 4; */ @@ -780,7 +816,6 @@ public final class HFileProtos { */ long getTotalUncompressedBytes(); - // optional uint32 data_index_count = 5; /** * optional uint32 data_index_count = 5; */ @@ -790,7 +825,6 @@ public final class HFileProtos { */ int getDataIndexCount(); - // optional uint32 meta_index_count = 6; /** * optional uint32 meta_index_count = 6; */ @@ -800,7 +834,6 @@ public final class HFileProtos { */ int getMetaIndexCount(); - // optional uint64 entry_count = 7; 
/** * optional uint64 entry_count = 7; */ @@ -810,7 +843,6 @@ public final class HFileProtos { */ long getEntryCount(); - // optional uint32 num_data_index_levels = 8; /** * optional uint32 num_data_index_levels = 8; */ @@ -820,7 +852,6 @@ public final class HFileProtos { */ int getNumDataIndexLevels(); - // optional uint64 first_data_block_offset = 9; /** * optional uint64 first_data_block_offset = 9; */ @@ -830,7 +861,6 @@ public final class HFileProtos { */ long getFirstDataBlockOffset(); - // optional uint64 last_data_block_offset = 10; /** * optional uint64 last_data_block_offset = 10; */ @@ -840,7 +870,6 @@ public final class HFileProtos { */ long getLastDataBlockOffset(); - // optional string comparator_class_name = 11; /** * optional string comparator_class_name = 11; */ @@ -855,7 +884,6 @@ public final class HFileProtos { com.google.protobuf.ByteString getComparatorClassNameBytes(); - // optional uint32 compression_codec = 12; /** * optional uint32 compression_codec = 12; */ @@ -865,7 +893,6 @@ public final class HFileProtos { */ int getCompressionCodec(); - // optional bytes encryption_key = 13; /** * optional bytes encryption_key = 13; */ @@ -876,42 +903,46 @@ public final class HFileProtos { com.google.protobuf.ByteString getEncryptionKey(); } /** - * Protobuf type {@code hbase.pb.FileTrailerProto} - * *
    * HFile file trailer
    * 
+ * + * Protobuf type {@code hbase.pb.FileTrailerProto} */ - public static final class FileTrailerProto extends - com.google.protobuf.GeneratedMessage - implements FileTrailerProtoOrBuilder { + public static final class FileTrailerProto extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.FileTrailerProto) + FileTrailerProtoOrBuilder { // Use FileTrailerProto.newBuilder() to construct. - private FileTrailerProto(com.google.protobuf.GeneratedMessage.Builder builder) { + private FileTrailerProto(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private FileTrailerProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final FileTrailerProto defaultInstance; - public static FileTrailerProto getDefaultInstance() { - return defaultInstance; } - - public FileTrailerProto getDefaultInstanceForType() { - return defaultInstance; + private FileTrailerProto() { + fileInfoOffset_ = 0L; + loadOnOpenDataOffset_ = 0L; + uncompressedDataIndexSize_ = 0L; + totalUncompressedBytes_ = 0L; + dataIndexCount_ = 0; + metaIndexCount_ = 0; + entryCount_ = 0L; + numDataIndexLevels_ = 0; + firstDataBlockOffset_ = 0L; + lastDataBlockOffset_ = 0L; + comparatorClassName_ = ""; + compressionCodec_ = 0; + encryptionKey_ = com.google.protobuf.ByteString.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private FileTrailerProto( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = 
com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -981,8 +1012,9 @@ public final class HFileProtos { break; } case 90: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000400; - comparatorClassName_ = input.readBytes(); + comparatorClassName_ = bs; break; } case 96: { @@ -1001,7 +1033,7 @@ public final class HFileProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -1012,30 +1044,14 @@ public final class HFileProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileTrailerProto_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileTrailerProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public FileTrailerProto parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new FileTrailerProto(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional uint64 file_info_offset = 1; public static final int FILE_INFO_OFFSET_FIELD_NUMBER = 1; private long fileInfoOffset_; /** @@ 
-1051,7 +1067,6 @@ public final class HFileProtos { return fileInfoOffset_; } - // optional uint64 load_on_open_data_offset = 2; public static final int LOAD_ON_OPEN_DATA_OFFSET_FIELD_NUMBER = 2; private long loadOnOpenDataOffset_; /** @@ -1067,7 +1082,6 @@ public final class HFileProtos { return loadOnOpenDataOffset_; } - // optional uint64 uncompressed_data_index_size = 3; public static final int UNCOMPRESSED_DATA_INDEX_SIZE_FIELD_NUMBER = 3; private long uncompressedDataIndexSize_; /** @@ -1083,7 +1097,6 @@ public final class HFileProtos { return uncompressedDataIndexSize_; } - // optional uint64 total_uncompressed_bytes = 4; public static final int TOTAL_UNCOMPRESSED_BYTES_FIELD_NUMBER = 4; private long totalUncompressedBytes_; /** @@ -1099,7 +1112,6 @@ public final class HFileProtos { return totalUncompressedBytes_; } - // optional uint32 data_index_count = 5; public static final int DATA_INDEX_COUNT_FIELD_NUMBER = 5; private int dataIndexCount_; /** @@ -1115,7 +1127,6 @@ public final class HFileProtos { return dataIndexCount_; } - // optional uint32 meta_index_count = 6; public static final int META_INDEX_COUNT_FIELD_NUMBER = 6; private int metaIndexCount_; /** @@ -1131,7 +1142,6 @@ public final class HFileProtos { return metaIndexCount_; } - // optional uint64 entry_count = 7; public static final int ENTRY_COUNT_FIELD_NUMBER = 7; private long entryCount_; /** @@ -1147,7 +1157,6 @@ public final class HFileProtos { return entryCount_; } - // optional uint32 num_data_index_levels = 8; public static final int NUM_DATA_INDEX_LEVELS_FIELD_NUMBER = 8; private int numDataIndexLevels_; /** @@ -1163,7 +1172,6 @@ public final class HFileProtos { return numDataIndexLevels_; } - // optional uint64 first_data_block_offset = 9; public static final int FIRST_DATA_BLOCK_OFFSET_FIELD_NUMBER = 9; private long firstDataBlockOffset_; /** @@ -1179,7 +1187,6 @@ public final class HFileProtos { return firstDataBlockOffset_; } - // optional uint64 last_data_block_offset = 10; public 
static final int LAST_DATA_BLOCK_OFFSET_FIELD_NUMBER = 10; private long lastDataBlockOffset_; /** @@ -1195,9 +1202,8 @@ public final class HFileProtos { return lastDataBlockOffset_; } - // optional string comparator_class_name = 11; public static final int COMPARATOR_CLASS_NAME_FIELD_NUMBER = 11; - private java.lang.Object comparatorClassName_; + private volatile java.lang.Object comparatorClassName_; /** * optional string comparator_class_name = 11; */ @@ -1238,7 +1244,6 @@ public final class HFileProtos { } } - // optional uint32 compression_codec = 12; public static final int COMPRESSION_CODEC_FIELD_NUMBER = 12; private int compressionCodec_; /** @@ -1254,7 +1259,6 @@ public final class HFileProtos { return compressionCodec_; } - // optional bytes encryption_key = 13; public static final int ENCRYPTION_KEY_FIELD_NUMBER = 13; private com.google.protobuf.ByteString encryptionKey_; /** @@ -1270,25 +1274,11 @@ public final class HFileProtos { return encryptionKey_; } - private void initFields() { - fileInfoOffset_ = 0L; - loadOnOpenDataOffset_ = 0L; - uncompressedDataIndexSize_ = 0L; - totalUncompressedBytes_ = 0L; - dataIndexCount_ = 0; - metaIndexCount_ = 0; - entryCount_ = 0L; - numDataIndexLevels_ = 0; - firstDataBlockOffset_ = 0L; - lastDataBlockOffset_ = 0L; - comparatorClassName_ = ""; - compressionCodec_ = 0; - encryptionKey_ = com.google.protobuf.ByteString.EMPTY; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -1296,7 +1286,6 @@ public final class HFileProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, fileInfoOffset_); } @@ -1328,7 +1317,7 @@ public final 
class HFileProtos { output.writeUInt64(10, lastDataBlockOffset_); } if (((bitField0_ & 0x00000400) == 0x00000400)) { - output.writeBytes(11, getComparatorClassNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 11, comparatorClassName_); } if (((bitField0_ & 0x00000800) == 0x00000800)) { output.writeUInt32(12, compressionCodec_); @@ -1336,12 +1325,11 @@ public final class HFileProtos { if (((bitField0_ & 0x00001000) == 0x00001000)) { output.writeBytes(13, encryptionKey_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -1386,8 +1374,7 @@ public final class HFileProtos { .computeUInt64Size(10, lastDataBlockOffset_); } if (((bitField0_ & 0x00000400) == 0x00000400)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(11, getComparatorClassNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(11, comparatorClassName_); } if (((bitField0_ & 0x00000800) == 0x00000800)) { size += com.google.protobuf.CodedOutputStream @@ -1397,19 +1384,13 @@ public final class HFileProtos { size += com.google.protobuf.CodedOutputStream .computeBytesSize(13, encryptionKey_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -1485,12 +1466,10 @@ public final class HFileProtos { result = result && getEncryptionKey() .equals(other.getEncryptionKey()); } - result = result && - 
getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -1500,19 +1479,23 @@ public final class HFileProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasFileInfoOffset()) { hash = (37 * hash) + FILE_INFO_OFFSET_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getFileInfoOffset()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getFileInfoOffset()); } if (hasLoadOnOpenDataOffset()) { hash = (37 * hash) + LOAD_ON_OPEN_DATA_OFFSET_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLoadOnOpenDataOffset()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getLoadOnOpenDataOffset()); } if (hasUncompressedDataIndexSize()) { hash = (37 * hash) + UNCOMPRESSED_DATA_INDEX_SIZE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getUncompressedDataIndexSize()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getUncompressedDataIndexSize()); } if (hasTotalUncompressedBytes()) { hash = (37 * hash) + TOTAL_UNCOMPRESSED_BYTES_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getTotalUncompressedBytes()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getTotalUncompressedBytes()); } if (hasDataIndexCount()) { hash = (37 * hash) + DATA_INDEX_COUNT_FIELD_NUMBER; @@ -1524,7 +1507,8 @@ public final class HFileProtos { } if (hasEntryCount()) { hash = (37 * hash) + ENTRY_COUNT_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getEntryCount()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getEntryCount()); } if (hasNumDataIndexLevels()) { hash = (37 * hash) + NUM_DATA_INDEX_LEVELS_FIELD_NUMBER; @@ -1532,11 +1516,13 @@ public final class HFileProtos { } if (hasFirstDataBlockOffset()) { hash = (37 * hash) + FIRST_DATA_BLOCK_OFFSET_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getFirstDataBlockOffset()); + hash = (53 * 
hash) + com.google.protobuf.Internal.hashLong( + getFirstDataBlockOffset()); } if (hasLastDataBlockOffset()) { hash = (37 * hash) + LAST_DATA_BLOCK_OFFSET_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLastDataBlockOffset()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getLastDataBlockOffset()); } if (hasComparatorClassName()) { hash = (37 * hash) + COMPARATOR_CLASS_NAME_FIELD_NUMBER; @@ -1550,7 +1536,7 @@ public final class HFileProtos { hash = (37 * hash) + ENCRYPTION_KEY_FIELD_NUMBER; hash = (53 * hash) + getEncryptionKey().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -1578,65 +1564,77 @@ public final class HFileProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return 
PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.FileTrailerProto} - * *
      * HFile file trailer
      * 
+ * + * Protobuf type {@code hbase.pb.FileTrailerProto} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProtoOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.FileTrailerProto) + org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProtoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileTrailerProto_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileTrailerProto_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -1649,18 +1647,15 @@ public final class HFileProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); fileInfoOffset_ = 0L; @@ -1692,10 +1687,6 @@ public final class HFileProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileTrailerProto_descriptor; @@ -1774,6 +1765,32 @@ public final class 
HFileProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto)other); @@ -1826,7 +1843,8 @@ public final class HFileProtos { if (other.hasEncryptionKey()) { setEncryptionKey(other.getEncryptionKey()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -1843,7 +1861,7 @@ public final class HFileProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -1853,7 +1871,6 @@ public final class HFileProtos { } private int bitField0_; - // optional uint64 file_info_offset = 1; private long fileInfoOffset_ ; /** * 
optional uint64 file_info_offset = 1; @@ -1886,7 +1903,6 @@ public final class HFileProtos { return this; } - // optional uint64 load_on_open_data_offset = 2; private long loadOnOpenDataOffset_ ; /** * optional uint64 load_on_open_data_offset = 2; @@ -1919,7 +1935,6 @@ public final class HFileProtos { return this; } - // optional uint64 uncompressed_data_index_size = 3; private long uncompressedDataIndexSize_ ; /** * optional uint64 uncompressed_data_index_size = 3; @@ -1952,7 +1967,6 @@ public final class HFileProtos { return this; } - // optional uint64 total_uncompressed_bytes = 4; private long totalUncompressedBytes_ ; /** * optional uint64 total_uncompressed_bytes = 4; @@ -1985,7 +1999,6 @@ public final class HFileProtos { return this; } - // optional uint32 data_index_count = 5; private int dataIndexCount_ ; /** * optional uint32 data_index_count = 5; @@ -2018,7 +2031,6 @@ public final class HFileProtos { return this; } - // optional uint32 meta_index_count = 6; private int metaIndexCount_ ; /** * optional uint32 meta_index_count = 6; @@ -2051,7 +2063,6 @@ public final class HFileProtos { return this; } - // optional uint64 entry_count = 7; private long entryCount_ ; /** * optional uint64 entry_count = 7; @@ -2084,7 +2095,6 @@ public final class HFileProtos { return this; } - // optional uint32 num_data_index_levels = 8; private int numDataIndexLevels_ ; /** * optional uint32 num_data_index_levels = 8; @@ -2117,7 +2127,6 @@ public final class HFileProtos { return this; } - // optional uint64 first_data_block_offset = 9; private long firstDataBlockOffset_ ; /** * optional uint64 first_data_block_offset = 9; @@ -2150,7 +2159,6 @@ public final class HFileProtos { return this; } - // optional uint64 last_data_block_offset = 10; private long lastDataBlockOffset_ ; /** * optional uint64 last_data_block_offset = 10; @@ -2183,7 +2191,6 @@ public final class HFileProtos { return this; } - // optional string comparator_class_name = 11; private java.lang.Object 
comparatorClassName_ = ""; /** * optional string comparator_class_name = 11; @@ -2197,9 +2204,12 @@ public final class HFileProtos { public java.lang.String getComparatorClassName() { java.lang.Object ref = comparatorClassName_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - comparatorClassName_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + comparatorClassName_ = s; + } return s; } else { return (java.lang.String) ref; @@ -2257,7 +2267,6 @@ public final class HFileProtos { return this; } - // optional uint32 compression_codec = 12; private int compressionCodec_ ; /** * optional uint32 compression_codec = 12; @@ -2290,7 +2299,6 @@ public final class HFileProtos { return this; } - // optional bytes encryption_key = 13; private com.google.protobuf.ByteString encryptionKey_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes encryption_key = 13; @@ -2325,34 +2333,71 @@ public final class HFileProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.FileTrailerProto) } + // @@protoc_insertion_point(class_scope:hbase.pb.FileTrailerProto) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto DEFAULT_INSTANCE; static { - defaultInstance = new FileTrailerProto(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto(); + } + + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public FileTrailerProto parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FileTrailerProto(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.FileTrailerProto) } - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FileInfoProto_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_FileInfoProto_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FileTrailerProto_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_FileTrailerProto_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { 
java.lang.String[] descriptorData = { @@ -2373,30 +2418,31 @@ public final class HFileProtos { "leProtosH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_hbase_pb_FileInfoProto_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_hbase_pb_FileInfoProto_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_FileInfoProto_descriptor, - new java.lang.String[] { "MapEntry", }); - internal_static_hbase_pb_FileTrailerProto_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_hbase_pb_FileTrailerProto_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_FileTrailerProto_descriptor, - new java.lang.String[] { "FileInfoOffset", "LoadOnOpenDataOffset", "UncompressedDataIndexSize", "TotalUncompressedBytes", "DataIndexCount", "MetaIndexCount", "EntryCount", "NumDataIndexLevels", "FirstDataBlockOffset", "LastDataBlockOffset", "ComparatorClassName", "CompressionCodec", "EncryptionKey", }); - return null; - } - }; + new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(), }, assigner); + internal_static_hbase_pb_FileInfoProto_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_hbase_pb_FileInfoProto_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_FileInfoProto_descriptor, + new java.lang.String[] { "MapEntry", }); + internal_static_hbase_pb_FileTrailerProto_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_hbase_pb_FileTrailerProto_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_FileTrailerProto_descriptor, + new java.lang.String[] { "FileInfoOffset", "LoadOnOpenDataOffset", "UncompressedDataIndexSize", "TotalUncompressedBytes", "DataIndexCount", "MetaIndexCount", "EntryCount", "NumDataIndexLevels", "FirstDataBlockOffset", "LastDataBlockOffset", "ComparatorClassName", "CompressionCodec", "EncryptionKey", }); + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/LoadBalancerProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/LoadBalancerProtos.java index f088678..2226f76 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/LoadBalancerProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/LoadBalancerProtos.java @@ -6,12 +6,18 
@@ package org.apache.hadoop.hbase.shaded.protobuf.generated; public final class LoadBalancerProtos { private LoadBalancerProtos() {} public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); } - public interface LoadBalancerStateOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface LoadBalancerStateOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.LoadBalancerState) + com.google.protobuf.MessageOrBuilder { - // optional bool balancer_on = 1; /** * optional bool balancer_on = 1; */ @@ -24,36 +30,28 @@ public final class LoadBalancerProtos { /** * Protobuf type {@code hbase.pb.LoadBalancerState} */ - public static final class LoadBalancerState extends - com.google.protobuf.GeneratedMessage - implements LoadBalancerStateOrBuilder { + public static final class LoadBalancerState extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.LoadBalancerState) + LoadBalancerStateOrBuilder { // Use LoadBalancerState.newBuilder() to construct. 
- private LoadBalancerState(com.google.protobuf.GeneratedMessage.Builder builder) { + private LoadBalancerState(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private LoadBalancerState(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final LoadBalancerState defaultInstance; - public static LoadBalancerState getDefaultInstance() { - return defaultInstance; } - - public LoadBalancerState getDefaultInstanceForType() { - return defaultInstance; + private LoadBalancerState() { + balancerOn_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private LoadBalancerState( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -83,7 +81,7 @@ public final class LoadBalancerProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -94,30 +92,14 @@ public final class LoadBalancerProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos.internal_static_hbase_pb_LoadBalancerState_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos.internal_static_hbase_pb_LoadBalancerState_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos.LoadBalancerState.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos.LoadBalancerState.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public LoadBalancerState parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new LoadBalancerState(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional bool balancer_on = 1; public static final int BALANCER_ON_FIELD_NUMBER = 1; private boolean balancerOn_; /** @@ -133,13 +115,11 @@ public final class LoadBalancerProtos { return balancerOn_; } - private void initFields() { - balancerOn_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -147,16 +127,14 @@ public final class LoadBalancerProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, balancerOn_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -164,19 +142,13 @@ public final class 
LoadBalancerProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, balancerOn_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -192,12 +164,10 @@ public final class LoadBalancerProtos { result = result && (getBalancerOn() == other.getBalancerOn()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -207,9 +177,10 @@ public final class LoadBalancerProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasBalancerOn()) { hash = (37 * hash) + BALANCER_ON_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getBalancerOn()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getBalancerOn()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -237,46 +208,57 @@ public final class LoadBalancerProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos.LoadBalancerState prototype) { - return 
newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -284,14 +266,15 @@ public final class LoadBalancerProtos { * Protobuf type {@code hbase.pb.LoadBalancerState} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos.LoadBalancerStateOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.LoadBalancerState) + org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos.LoadBalancerStateOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos.internal_static_hbase_pb_LoadBalancerState_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos.internal_static_hbase_pb_LoadBalancerState_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -304,18 +287,15 @@ public final class LoadBalancerProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if 
(com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); balancerOn_ = false; @@ -323,10 +303,6 @@ public final class LoadBalancerProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos.internal_static_hbase_pb_LoadBalancerState_descriptor; @@ -357,6 +333,32 @@ public final class LoadBalancerProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos.LoadBalancerState) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos.LoadBalancerState)other); @@ -371,7 +373,8 @@ public final class LoadBalancerProtos { if (other.hasBalancerOn()) { setBalancerOn(other.getBalancerOn()); } - 
this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -388,7 +391,7 @@ public final class LoadBalancerProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos.LoadBalancerState) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -398,7 +401,6 @@ public final class LoadBalancerProtos { } private int bitField0_; - // optional bool balancer_on = 1; private boolean balancerOn_ ; /** * optional bool balancer_on = 1; @@ -430,29 +432,66 @@ public final class LoadBalancerProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.LoadBalancerState) } + // @@protoc_insertion_point(class_scope:hbase.pb.LoadBalancerState) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos.LoadBalancerState DEFAULT_INSTANCE; static { - defaultInstance = new LoadBalancerState(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos.LoadBalancerState(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos.LoadBalancerState getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public LoadBalancerState parsePartialFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new LoadBalancerState(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos.LoadBalancerState getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.LoadBalancerState) } - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_LoadBalancerState_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_LoadBalancerState_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { @@ -462,23 +501,23 @@ public final class LoadBalancerProtos { "ratedB\022LoadBalancerProtosH\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_hbase_pb_LoadBalancerState_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_hbase_pb_LoadBalancerState_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - 
internal_static_hbase_pb_LoadBalancerState_descriptor, - new java.lang.String[] { "BalancerOn", }); - return null; - } - }; + new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); + internal_static_hbase_pb_LoadBalancerState_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_hbase_pb_LoadBalancerState_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_LoadBalancerState_descriptor, + new java.lang.String[] { "BalancerOn", }); } // @@protoc_insertion_point(outer_class_scope) diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MapReduceProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MapReduceProtos.java index ee901f0..b5abf42 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MapReduceProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MapReduceProtos.java @@ -6,12 +6,18 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated; public final class MapReduceProtos { private MapReduceProtos() {} public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); } - public interface ScanMetricsOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ScanMetricsOrBuilder extends + // 
@@protoc_insertion_point(interface_extends:hbase.pb.ScanMetrics) + com.google.protobuf.MessageOrBuilder { - // repeated .hbase.pb.NameInt64Pair metrics = 1; /** * repeated .hbase.pb.NameInt64Pair metrics = 1; */ @@ -39,36 +45,28 @@ public final class MapReduceProtos { /** * Protobuf type {@code hbase.pb.ScanMetrics} */ - public static final class ScanMetrics extends - com.google.protobuf.GeneratedMessage - implements ScanMetricsOrBuilder { + public static final class ScanMetrics extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ScanMetrics) + ScanMetricsOrBuilder { // Use ScanMetrics.newBuilder() to construct. - private ScanMetrics(com.google.protobuf.GeneratedMessage.Builder builder) { + private ScanMetrics(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ScanMetrics(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ScanMetrics defaultInstance; - public static ScanMetrics getDefaultInstance() { - return defaultInstance; } - - public ScanMetrics getDefaultInstanceForType() { - return defaultInstance; + private ScanMetrics() { + metrics_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ScanMetrics( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -92,7 +90,8 @@ public final class MapReduceProtos { metrics_ = new java.util.ArrayList(); mutable_bitField0_ |= 
0x00000001; } - metrics_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair.PARSER, extensionRegistry)); + metrics_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair.PARSER, extensionRegistry)); break; } } @@ -101,7 +100,7 @@ public final class MapReduceProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { metrics_ = java.util.Collections.unmodifiableList(metrics_); @@ -115,29 +114,13 @@ public final class MapReduceProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.internal_static_hbase_pb_ScanMetrics_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.internal_static_hbase_pb_ScanMetrics_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ScanMetrics parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ScanMetrics(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - // repeated .hbase.pb.NameInt64Pair metrics = 1; public static final int METRICS_FIELD_NUMBER = 1; private java.util.List 
metrics_; /** @@ -173,13 +156,11 @@ public final class MapReduceProtos { return metrics_.get(index); } - private void initFields() { - metrics_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -187,16 +168,14 @@ public final class MapReduceProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < metrics_.size(); i++) { output.writeMessage(1, metrics_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -204,19 +183,13 @@ public final class MapReduceProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, metrics_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -229,12 +202,10 @@ public final class MapReduceProtos { boolean result = true; result = result && getMetricsList() .equals(other.getMetricsList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if 
(memoizedHashCode != 0) { @@ -246,7 +217,7 @@ public final class MapReduceProtos { hash = (37 * hash) + METRICS_FIELD_NUMBER; hash = (53 * hash) + getMetricsList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -274,46 +245,57 @@ public final class MapReduceProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -321,14 +303,15 @@ public final class MapReduceProtos { * Protobuf type {@code hbase.pb.ScanMetrics} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ScanMetrics) + org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.internal_static_hbase_pb_ScanMetrics_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.internal_static_hbase_pb_ScanMetrics_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -341,19 +324,16 @@ public final class MapReduceProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getMetricsFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (metricsBuilder_ == null) { @@ -365,10 +345,6 @@ public final class MapReduceProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.internal_static_hbase_pb_ScanMetrics_descriptor; @@ -402,6 +378,32 @@ public final class MapReduceProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public 
Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics)other); @@ -432,14 +434,15 @@ public final class MapReduceProtos { metrics_ = other.metrics_; bitField0_ = (bitField0_ & ~0x00000001); metricsBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getMetricsFieldBuilder() : null; } else { metricsBuilder_.addAllMessages(other.metrics_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -456,7 +459,7 @@ public final class MapReduceProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -466,7 +469,6 @@ public final class MapReduceProtos { } private int bitField0_; - // repeated .hbase.pb.NameInt64Pair metrics = 1; private java.util.List metrics_ = java.util.Collections.emptyList(); private void ensureMetricsIsMutable() { @@ -476,7 +478,7 @@ public final class MapReduceProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder> metricsBuilder_; /** @@ -608,7 +610,8 @@ public final class MapReduceProtos { java.lang.Iterable values) { if (metricsBuilder_ == null) { ensureMetricsIsMutable(); - super.addAll(values, metrics_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, metrics_); onChanged(); } else { metricsBuilder_.addAllMessages(values); @@ -691,11 +694,11 @@ public final class MapReduceProtos { getMetricsBuilderList() { return getMetricsFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder> getMetricsFieldBuilder() { if (metricsBuilder_ == null) { - metricsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + metricsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder>( metrics_, ((bitField0_ & 0x00000001) == 0x00000001), @@ -705,27 +708,64 @@ public final class MapReduceProtos { } return metricsBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // 
@@protoc_insertion_point(builder_scope:hbase.pb.ScanMetrics) } + // @@protoc_insertion_point(class_scope:hbase.pb.ScanMetrics) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics DEFAULT_INSTANCE; static { - defaultInstance = new ScanMetrics(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ScanMetrics parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ScanMetrics(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ScanMetrics) } - public interface TableSnapshotRegionSplitOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface TableSnapshotRegionSplitOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.TableSnapshotRegionSplit) + com.google.protobuf.MessageOrBuilder { - // repeated string locations = 2; /** * repeated string locations = 2; */ java.util.List - getLocationsList(); + getLocationsList(); /** * repeated string locations = 2; */ @@ -740,7 +780,6 @@ public final class MapReduceProtos { com.google.protobuf.ByteString getLocationsBytes(int index); - // optional .hbase.pb.TableSchema 
table = 3; /** * optional .hbase.pb.TableSchema table = 3; */ @@ -754,7 +793,6 @@ public final class MapReduceProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableOrBuilder(); - // optional .hbase.pb.RegionInfo region = 4; /** * optional .hbase.pb.RegionInfo region = 4; */ @@ -771,36 +809,28 @@ public final class MapReduceProtos { /** * Protobuf type {@code hbase.pb.TableSnapshotRegionSplit} */ - public static final class TableSnapshotRegionSplit extends - com.google.protobuf.GeneratedMessage - implements TableSnapshotRegionSplitOrBuilder { + public static final class TableSnapshotRegionSplit extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.TableSnapshotRegionSplit) + TableSnapshotRegionSplitOrBuilder { // Use TableSnapshotRegionSplit.newBuilder() to construct. - private TableSnapshotRegionSplit(com.google.protobuf.GeneratedMessage.Builder builder) { + private TableSnapshotRegionSplit(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private TableSnapshotRegionSplit(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final TableSnapshotRegionSplit defaultInstance; - public static TableSnapshotRegionSplit getDefaultInstance() { - return defaultInstance; } - - public TableSnapshotRegionSplit getDefaultInstanceForType() { - return defaultInstance; + private TableSnapshotRegionSplit() { + locations_ = com.google.protobuf.LazyStringArrayList.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private TableSnapshotRegionSplit( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -820,11 +850,12 @@ public final class MapReduceProtos { break; } case 18: { + com.google.protobuf.ByteString bs = input.readBytes(); if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { locations_ = new com.google.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000001; } - locations_.add(input.readBytes()); + locations_.add(bs); break; } case 26: { @@ -859,10 +890,10 @@ public final class MapReduceProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - locations_ = new com.google.protobuf.UnmodifiableLazyStringList(locations_); + locations_ = locations_.getUnmodifiableView(); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -873,36 +904,20 @@ public final class MapReduceProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.internal_static_hbase_pb_TableSnapshotRegionSplit_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.internal_static_hbase_pb_TableSnapshotRegionSplit_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public 
TableSnapshotRegionSplit parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new TableSnapshotRegionSplit(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // repeated string locations = 2; public static final int LOCATIONS_FIELD_NUMBER = 2; private com.google.protobuf.LazyStringList locations_; /** * repeated string locations = 2; */ - public java.util.List + public com.google.protobuf.ProtocolStringList getLocationsList() { return locations_; } @@ -926,7 +941,6 @@ public final class MapReduceProtos { return locations_.getByteString(index); } - // optional .hbase.pb.TableSchema table = 3; public static final int TABLE_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema table_; /** @@ -939,16 +953,15 @@ public final class MapReduceProtos { * optional .hbase.pb.TableSchema table = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getTable() { - return table_; + return table_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : table_; } /** * optional .hbase.pb.TableSchema table = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableOrBuilder() { - return table_; + return table_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : table_; } - // optional .hbase.pb.RegionInfo region = 4; public static final int REGION_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo region_; /** @@ -961,24 +974,20 @@ public final class MapReduceProtos { * optional .hbase.pb.RegionInfo region = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegion() { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : region_; } /** * optional .hbase.pb.RegionInfo region = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionOrBuilder() { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : region_; } - private void initFields() { - locations_ = com.google.protobuf.LazyStringArrayList.EMPTY; - table_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (hasTable()) { if (!getTable().isInitialized()) { @@ -998,55 +1007,46 @@ public final class MapReduceProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < locations_.size(); i++) { - output.writeBytes(2, locations_.getByteString(i)); + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, locations_.getRaw(i)); } if (((bitField0_ & 0x00000001) == 0x00000001)) { 
- output.writeMessage(3, table_); + output.writeMessage(3, getTable()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(4, region_); + output.writeMessage(4, getRegion()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; { int dataSize = 0; for (int i = 0; i < locations_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(locations_.getByteString(i)); + dataSize += computeStringSizeNoTag(locations_.getRaw(i)); } size += dataSize; size += 1 * getLocationsList().size(); } if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, table_); + .computeMessageSize(3, getTable()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(4, region_); + .computeMessageSize(4, getRegion()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -1069,12 +1069,10 @@ public final class MapReduceProtos { result = result && getRegion() .equals(other.getRegion()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -1094,7 +1092,7 @@ public final class MapReduceProtos { hash 
= (37 * hash) + REGION_FIELD_NUMBER; hash = (53 * hash) + getRegion().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -1122,46 +1120,57 @@ public final class MapReduceProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -1169,14 +1178,15 @@ public final class MapReduceProtos { * Protobuf type {@code hbase.pb.TableSnapshotRegionSplit} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplitOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.TableSnapshotRegionSplit) + org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplitOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.internal_static_hbase_pb_TableSnapshotRegionSplit_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.internal_static_hbase_pb_TableSnapshotRegionSplit_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -1189,32 +1199,29 @@ public final class MapReduceProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTableFieldBuilder(); getRegionFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); locations_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); if (tableBuilder_ == null) { - table_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + table_ = null; } else { tableBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + region_ = null; } else { regionBuilder_.clear(); } @@ -1222,10 +1229,6 @@ public final class MapReduceProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.internal_static_hbase_pb_TableSnapshotRegionSplit_descriptor; @@ -1248,8 +1251,7 @@ public 
final class MapReduceProtos { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - locations_ = new com.google.protobuf.UnmodifiableLazyStringList( - locations_); + locations_ = locations_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000001); } result.locations_ = locations_; @@ -1274,6 +1276,32 @@ public final class MapReduceProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit)other); @@ -1301,20 +1329,19 @@ public final class MapReduceProtos { if (other.hasRegion()) { mergeRegion(other.getRegion()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (hasTable()) { if (!getTable().isInitialized()) { - return false; } } if (hasRegion()) { if (!getRegion().isInitialized()) { - return false; 
} } @@ -1330,7 +1357,7 @@ public final class MapReduceProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -1340,7 +1367,6 @@ public final class MapReduceProtos { } private int bitField0_; - // repeated string locations = 2; private com.google.protobuf.LazyStringList locations_ = com.google.protobuf.LazyStringArrayList.EMPTY; private void ensureLocationsIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { @@ -1351,9 +1377,9 @@ public final class MapReduceProtos { /** * repeated string locations = 2; */ - public java.util.List + public com.google.protobuf.ProtocolStringList getLocationsList() { - return java.util.Collections.unmodifiableList(locations_); + return locations_.getUnmodifiableView(); } /** * repeated string locations = 2; @@ -1406,7 +1432,8 @@ public final class MapReduceProtos { public Builder addAllLocations( java.lang.Iterable values) { ensureLocationsIsMutable(); - super.addAll(values, locations_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, locations_); onChanged(); return this; } @@ -1433,9 +1460,8 @@ public final class MapReduceProtos { return this; } - // optional .hbase.pb.TableSchema table = 3; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema table_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema table_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableBuilder_; /** * optional .hbase.pb.TableSchema table = 3; @@ -1448,7 +1474,7 @@ public final class MapReduceProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getTable() { if (tableBuilder_ == null) { - return table_; + return table_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : table_; } else { return tableBuilder_.getMessage(); } @@ -1489,6 +1515,7 @@ public final class MapReduceProtos { public Builder mergeTable(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema value) { if (tableBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + table_ != null && table_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) { table_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.newBuilder(table_).mergeFrom(value).buildPartial(); @@ -1507,7 +1534,7 @@ public final class MapReduceProtos { */ public Builder clearTable() { if (tableBuilder_ == null) { - table_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + table_ = null; onChanged(); } else { tableBuilder_.clear(); @@ -1530,19 +1557,20 @@ public final class MapReduceProtos { if (tableBuilder_ != null) { return tableBuilder_.getMessageOrBuilder(); } else { - return table_; + return table_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : table_; } } /** * optional .hbase.pb.TableSchema table = 3; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> getTableFieldBuilder() { if (tableBuilder_ == null) { - tableBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>( - table_, + getTable(), getParentForChildren(), isClean()); table_ = null; @@ -1550,9 +1578,8 @@ public final class MapReduceProtos { return tableBuilder_; } - // optional .hbase.pb.RegionInfo region = 4; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo region_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionBuilder_; /** * optional .hbase.pb.RegionInfo region = 4; @@ -1565,7 +1592,7 @@ public final class MapReduceProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegion() { if 
(regionBuilder_ == null) { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } @@ -1606,6 +1633,7 @@ public final class MapReduceProtos { public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004) && + region_ != null && region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(region_).mergeFrom(value).buildPartial(); @@ -1624,7 +1652,7 @@ public final class MapReduceProtos { */ public Builder clearRegion() { if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + region_ = null; onChanged(); } else { regionBuilder_.clear(); @@ -1647,53 +1675,91 @@ public final class MapReduceProtos { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { - return region_; + return region_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : region_; } } /** * optional .hbase.pb.RegionInfo region = 4; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( - region_, + getRegion(), getParentForChildren(), isClean()); region_ = null; } return regionBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.TableSnapshotRegionSplit) } + // @@protoc_insertion_point(class_scope:hbase.pb.TableSnapshotRegionSplit) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit DEFAULT_INSTANCE; static { - defaultInstance = new TableSnapshotRegionSplit(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit getDefaultInstance() { + return 
DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public TableSnapshotRegionSplit parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TableSnapshotRegionSplit(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.TableSnapshotRegionSplit) } - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ScanMetrics_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ScanMetrics_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_TableSnapshotRegionSplit_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_TableSnapshotRegionSplit_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { @@ -1707,30 +1773,31 @@ public final 
class MapReduceProtos { "duceProtosH\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_hbase_pb_ScanMetrics_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_hbase_pb_ScanMetrics_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ScanMetrics_descriptor, - new java.lang.String[] { "Metrics", }); - internal_static_hbase_pb_TableSnapshotRegionSplit_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_hbase_pb_TableSnapshotRegionSplit_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_TableSnapshotRegionSplit_descriptor, - new java.lang.String[] { "Locations", "Table", "Region", }); - return null; - } - }; + new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(), }, assigner); + internal_static_hbase_pb_ScanMetrics_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_hbase_pb_ScanMetrics_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ScanMetrics_descriptor, + new java.lang.String[] { "Metrics", }); + internal_static_hbase_pb_TableSnapshotRegionSplit_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_hbase_pb_TableSnapshotRegionSplit_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_TableSnapshotRegionSplit_descriptor, + new java.lang.String[] { "Locations", "Table", "Region", }); + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProcedureProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProcedureProtos.java index a758109..bc8d40b 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProcedureProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProcedureProtos.java @@ -6,7 +6,13 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated; public final class MasterProcedureProtos { private MasterProcedureProtos() {} public static void registerAllExtensions( + 
com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); } /** * Protobuf enum {@code hbase.pb.CreateTableState} @@ -16,27 +22,27 @@ public final class MasterProcedureProtos { /** * CREATE_TABLE_PRE_OPERATION = 1; */ - CREATE_TABLE_PRE_OPERATION(0, 1), + CREATE_TABLE_PRE_OPERATION(1), /** * CREATE_TABLE_WRITE_FS_LAYOUT = 2; */ - CREATE_TABLE_WRITE_FS_LAYOUT(1, 2), + CREATE_TABLE_WRITE_FS_LAYOUT(2), /** * CREATE_TABLE_ADD_TO_META = 3; */ - CREATE_TABLE_ADD_TO_META(2, 3), + CREATE_TABLE_ADD_TO_META(3), /** * CREATE_TABLE_ASSIGN_REGIONS = 4; */ - CREATE_TABLE_ASSIGN_REGIONS(3, 4), + CREATE_TABLE_ASSIGN_REGIONS(4), /** * CREATE_TABLE_UPDATE_DESC_CACHE = 5; */ - CREATE_TABLE_UPDATE_DESC_CACHE(4, 5), + CREATE_TABLE_UPDATE_DESC_CACHE(5), /** * CREATE_TABLE_POST_OPERATION = 6; */ - CREATE_TABLE_POST_OPERATION(5, 6), + CREATE_TABLE_POST_OPERATION(6), ; /** @@ -65,9 +71,19 @@ public final class MasterProcedureProtos { public static final int CREATE_TABLE_POST_OPERATION_VALUE = 6; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. 
+ */ + @java.lang.Deprecated public static CreateTableState valueOf(int value) { + return forNumber(value); + } + + public static CreateTableState forNumber(int value) { switch (value) { case 1: return CREATE_TABLE_PRE_OPERATION; case 2: return CREATE_TABLE_WRITE_FS_LAYOUT; @@ -83,17 +99,17 @@ public final class MasterProcedureProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + CreateTableState> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public CreateTableState findValueByNumber(int number) { - return CreateTableState.valueOf(number); + return CreateTableState.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -115,11 +131,9 @@ public final class MasterProcedureProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private CreateTableState(int index, int value) { - this.index = index; + private CreateTableState(int value) { this.value = value; } @@ -134,31 +148,31 @@ public final class MasterProcedureProtos { /** * MODIFY_TABLE_PREPARE = 1; */ - MODIFY_TABLE_PREPARE(0, 1), + MODIFY_TABLE_PREPARE(1), /** * MODIFY_TABLE_PRE_OPERATION = 2; */ - MODIFY_TABLE_PRE_OPERATION(1, 2), + MODIFY_TABLE_PRE_OPERATION(2), /** * MODIFY_TABLE_UPDATE_TABLE_DESCRIPTOR = 3; */ - MODIFY_TABLE_UPDATE_TABLE_DESCRIPTOR(2, 3), + MODIFY_TABLE_UPDATE_TABLE_DESCRIPTOR(3), /** * MODIFY_TABLE_REMOVE_REPLICA_COLUMN = 4; */ - MODIFY_TABLE_REMOVE_REPLICA_COLUMN(3, 4), + MODIFY_TABLE_REMOVE_REPLICA_COLUMN(4), /** * MODIFY_TABLE_DELETE_FS_LAYOUT = 5; */ - MODIFY_TABLE_DELETE_FS_LAYOUT(4, 5), + MODIFY_TABLE_DELETE_FS_LAYOUT(5), /** * 
MODIFY_TABLE_POST_OPERATION = 6; */ - MODIFY_TABLE_POST_OPERATION(5, 6), + MODIFY_TABLE_POST_OPERATION(6), /** * MODIFY_TABLE_REOPEN_ALL_REGIONS = 7; */ - MODIFY_TABLE_REOPEN_ALL_REGIONS(6, 7), + MODIFY_TABLE_REOPEN_ALL_REGIONS(7), ; /** @@ -191,9 +205,19 @@ public final class MasterProcedureProtos { public static final int MODIFY_TABLE_REOPEN_ALL_REGIONS_VALUE = 7; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static ModifyTableState valueOf(int value) { + return forNumber(value); + } + + public static ModifyTableState forNumber(int value) { switch (value) { case 1: return MODIFY_TABLE_PREPARE; case 2: return MODIFY_TABLE_PRE_OPERATION; @@ -210,17 +234,17 @@ public final class MasterProcedureProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + ModifyTableState> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public ModifyTableState findValueByNumber(int number) { - return ModifyTableState.valueOf(number); + return ModifyTableState.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -242,11 +266,9 @@ public final class MasterProcedureProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private ModifyTableState(int index, int value) { - this.index = index; + private ModifyTableState(int value) { this.value = value; } @@ -261,31 +283,31 @@ public final class MasterProcedureProtos { /** * TRUNCATE_TABLE_PRE_OPERATION = 1; */ - TRUNCATE_TABLE_PRE_OPERATION(0, 
1), + TRUNCATE_TABLE_PRE_OPERATION(1), /** * TRUNCATE_TABLE_REMOVE_FROM_META = 2; */ - TRUNCATE_TABLE_REMOVE_FROM_META(1, 2), + TRUNCATE_TABLE_REMOVE_FROM_META(2), /** * TRUNCATE_TABLE_CLEAR_FS_LAYOUT = 3; */ - TRUNCATE_TABLE_CLEAR_FS_LAYOUT(2, 3), + TRUNCATE_TABLE_CLEAR_FS_LAYOUT(3), /** * TRUNCATE_TABLE_CREATE_FS_LAYOUT = 4; */ - TRUNCATE_TABLE_CREATE_FS_LAYOUT(3, 4), + TRUNCATE_TABLE_CREATE_FS_LAYOUT(4), /** * TRUNCATE_TABLE_ADD_TO_META = 5; */ - TRUNCATE_TABLE_ADD_TO_META(4, 5), + TRUNCATE_TABLE_ADD_TO_META(5), /** * TRUNCATE_TABLE_ASSIGN_REGIONS = 6; */ - TRUNCATE_TABLE_ASSIGN_REGIONS(5, 6), + TRUNCATE_TABLE_ASSIGN_REGIONS(6), /** * TRUNCATE_TABLE_POST_OPERATION = 7; */ - TRUNCATE_TABLE_POST_OPERATION(6, 7), + TRUNCATE_TABLE_POST_OPERATION(7), ; /** @@ -318,9 +340,19 @@ public final class MasterProcedureProtos { public static final int TRUNCATE_TABLE_POST_OPERATION_VALUE = 7; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. 
+ */ + @java.lang.Deprecated public static TruncateTableState valueOf(int value) { + return forNumber(value); + } + + public static TruncateTableState forNumber(int value) { switch (value) { case 1: return TRUNCATE_TABLE_PRE_OPERATION; case 2: return TRUNCATE_TABLE_REMOVE_FROM_META; @@ -337,17 +369,17 @@ public final class MasterProcedureProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + TruncateTableState> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public TruncateTableState findValueByNumber(int number) { - return TruncateTableState.valueOf(number); + return TruncateTableState.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -369,11 +401,9 @@ public final class MasterProcedureProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private TruncateTableState(int index, int value) { - this.index = index; + private TruncateTableState(int value) { this.value = value; } @@ -388,27 +418,27 @@ public final class MasterProcedureProtos { /** * DELETE_TABLE_PRE_OPERATION = 1; */ - DELETE_TABLE_PRE_OPERATION(0, 1), + DELETE_TABLE_PRE_OPERATION(1), /** * DELETE_TABLE_REMOVE_FROM_META = 2; */ - DELETE_TABLE_REMOVE_FROM_META(1, 2), + DELETE_TABLE_REMOVE_FROM_META(2), /** * DELETE_TABLE_CLEAR_FS_LAYOUT = 3; */ - DELETE_TABLE_CLEAR_FS_LAYOUT(2, 3), + DELETE_TABLE_CLEAR_FS_LAYOUT(3), /** * DELETE_TABLE_UPDATE_DESC_CACHE = 4; */ - DELETE_TABLE_UPDATE_DESC_CACHE(3, 4), + DELETE_TABLE_UPDATE_DESC_CACHE(4), /** * DELETE_TABLE_UNASSIGN_REGIONS = 5; */ - DELETE_TABLE_UNASSIGN_REGIONS(4, 5), + 
DELETE_TABLE_UNASSIGN_REGIONS(5), /** * DELETE_TABLE_POST_OPERATION = 6; */ - DELETE_TABLE_POST_OPERATION(5, 6), + DELETE_TABLE_POST_OPERATION(6), ; /** @@ -437,9 +467,19 @@ public final class MasterProcedureProtos { public static final int DELETE_TABLE_POST_OPERATION_VALUE = 6; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static DeleteTableState valueOf(int value) { + return forNumber(value); + } + + public static DeleteTableState forNumber(int value) { switch (value) { case 1: return DELETE_TABLE_PRE_OPERATION; case 2: return DELETE_TABLE_REMOVE_FROM_META; @@ -455,17 +495,17 @@ public final class MasterProcedureProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + DeleteTableState> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public DeleteTableState findValueByNumber(int number) { - return DeleteTableState.valueOf(number); + return DeleteTableState.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -487,11 +527,9 @@ public final class MasterProcedureProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private DeleteTableState(int index, int value) { - this.index = index; + private DeleteTableState(int value) { this.value = value; } @@ -506,23 +544,23 @@ public final class MasterProcedureProtos { /** * CREATE_NAMESPACE_PREPARE = 1; */ - CREATE_NAMESPACE_PREPARE(0, 1), + CREATE_NAMESPACE_PREPARE(1), /** * CREATE_NAMESPACE_CREATE_DIRECTORY = 2; */ - 
CREATE_NAMESPACE_CREATE_DIRECTORY(1, 2), + CREATE_NAMESPACE_CREATE_DIRECTORY(2), /** * CREATE_NAMESPACE_INSERT_INTO_NS_TABLE = 3; */ - CREATE_NAMESPACE_INSERT_INTO_NS_TABLE(2, 3), + CREATE_NAMESPACE_INSERT_INTO_NS_TABLE(3), /** * CREATE_NAMESPACE_UPDATE_ZK = 4; */ - CREATE_NAMESPACE_UPDATE_ZK(3, 4), + CREATE_NAMESPACE_UPDATE_ZK(4), /** * CREATE_NAMESPACE_SET_NAMESPACE_QUOTA = 5; */ - CREATE_NAMESPACE_SET_NAMESPACE_QUOTA(4, 5), + CREATE_NAMESPACE_SET_NAMESPACE_QUOTA(5), ; /** @@ -547,9 +585,19 @@ public final class MasterProcedureProtos { public static final int CREATE_NAMESPACE_SET_NAMESPACE_QUOTA_VALUE = 5; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static CreateNamespaceState valueOf(int value) { + return forNumber(value); + } + + public static CreateNamespaceState forNumber(int value) { switch (value) { case 1: return CREATE_NAMESPACE_PREPARE; case 2: return CREATE_NAMESPACE_CREATE_DIRECTORY; @@ -564,17 +612,17 @@ public final class MasterProcedureProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + CreateNamespaceState> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public CreateNamespaceState findValueByNumber(int number) { - return CreateNamespaceState.valueOf(number); + return CreateNamespaceState.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -596,11 +644,9 @@ public final class MasterProcedureProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int 
value; - private CreateNamespaceState(int index, int value) { - this.index = index; + private CreateNamespaceState(int value) { this.value = value; } @@ -615,15 +661,15 @@ public final class MasterProcedureProtos { /** * MODIFY_NAMESPACE_PREPARE = 1; */ - MODIFY_NAMESPACE_PREPARE(0, 1), + MODIFY_NAMESPACE_PREPARE(1), /** * MODIFY_NAMESPACE_UPDATE_NS_TABLE = 2; */ - MODIFY_NAMESPACE_UPDATE_NS_TABLE(1, 2), + MODIFY_NAMESPACE_UPDATE_NS_TABLE(2), /** * MODIFY_NAMESPACE_UPDATE_ZK = 3; */ - MODIFY_NAMESPACE_UPDATE_ZK(2, 3), + MODIFY_NAMESPACE_UPDATE_ZK(3), ; /** @@ -640,9 +686,19 @@ public final class MasterProcedureProtos { public static final int MODIFY_NAMESPACE_UPDATE_ZK_VALUE = 3; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static ModifyNamespaceState valueOf(int value) { + return forNumber(value); + } + + public static ModifyNamespaceState forNumber(int value) { switch (value) { case 1: return MODIFY_NAMESPACE_PREPARE; case 2: return MODIFY_NAMESPACE_UPDATE_NS_TABLE; @@ -655,17 +711,17 @@ public final class MasterProcedureProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + ModifyNamespaceState> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public ModifyNamespaceState findValueByNumber(int number) { - return ModifyNamespaceState.valueOf(number); + return ModifyNamespaceState.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -687,11 +743,9 @@ public final class MasterProcedureProtos { return 
VALUES[desc.getIndex()]; } - private final int index; private final int value; - private ModifyNamespaceState(int index, int value) { - this.index = index; + private ModifyNamespaceState(int value) { this.value = value; } @@ -706,23 +760,23 @@ public final class MasterProcedureProtos { /** * DELETE_NAMESPACE_PREPARE = 1; */ - DELETE_NAMESPACE_PREPARE(0, 1), + DELETE_NAMESPACE_PREPARE(1), /** * DELETE_NAMESPACE_DELETE_FROM_NS_TABLE = 2; */ - DELETE_NAMESPACE_DELETE_FROM_NS_TABLE(1, 2), + DELETE_NAMESPACE_DELETE_FROM_NS_TABLE(2), /** * DELETE_NAMESPACE_REMOVE_FROM_ZK = 3; */ - DELETE_NAMESPACE_REMOVE_FROM_ZK(2, 3), + DELETE_NAMESPACE_REMOVE_FROM_ZK(3), /** * DELETE_NAMESPACE_DELETE_DIRECTORIES = 4; */ - DELETE_NAMESPACE_DELETE_DIRECTORIES(3, 4), + DELETE_NAMESPACE_DELETE_DIRECTORIES(4), /** * DELETE_NAMESPACE_REMOVE_NAMESPACE_QUOTA = 5; */ - DELETE_NAMESPACE_REMOVE_NAMESPACE_QUOTA(4, 5), + DELETE_NAMESPACE_REMOVE_NAMESPACE_QUOTA(5), ; /** @@ -747,9 +801,19 @@ public final class MasterProcedureProtos { public static final int DELETE_NAMESPACE_REMOVE_NAMESPACE_QUOTA_VALUE = 5; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. 
+ */ + @java.lang.Deprecated public static DeleteNamespaceState valueOf(int value) { + return forNumber(value); + } + + public static DeleteNamespaceState forNumber(int value) { switch (value) { case 1: return DELETE_NAMESPACE_PREPARE; case 2: return DELETE_NAMESPACE_DELETE_FROM_NS_TABLE; @@ -764,17 +828,17 @@ public final class MasterProcedureProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + DeleteNamespaceState> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public DeleteNamespaceState findValueByNumber(int number) { - return DeleteNamespaceState.valueOf(number); + return DeleteNamespaceState.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -796,11 +860,9 @@ public final class MasterProcedureProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private DeleteNamespaceState(int index, int value) { - this.index = index; + private DeleteNamespaceState(int value) { this.value = value; } @@ -815,23 +877,23 @@ public final class MasterProcedureProtos { /** * ADD_COLUMN_FAMILY_PREPARE = 1; */ - ADD_COLUMN_FAMILY_PREPARE(0, 1), + ADD_COLUMN_FAMILY_PREPARE(1), /** * ADD_COLUMN_FAMILY_PRE_OPERATION = 2; */ - ADD_COLUMN_FAMILY_PRE_OPERATION(1, 2), + ADD_COLUMN_FAMILY_PRE_OPERATION(2), /** * ADD_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR = 3; */ - ADD_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR(2, 3), + ADD_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR(3), /** * ADD_COLUMN_FAMILY_POST_OPERATION = 4; */ - ADD_COLUMN_FAMILY_POST_OPERATION(3, 4), + ADD_COLUMN_FAMILY_POST_OPERATION(4), /** * ADD_COLUMN_FAMILY_REOPEN_ALL_REGIONS = 
5; */ - ADD_COLUMN_FAMILY_REOPEN_ALL_REGIONS(4, 5), + ADD_COLUMN_FAMILY_REOPEN_ALL_REGIONS(5), ; /** @@ -856,9 +918,19 @@ public final class MasterProcedureProtos { public static final int ADD_COLUMN_FAMILY_REOPEN_ALL_REGIONS_VALUE = 5; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static AddColumnFamilyState valueOf(int value) { + return forNumber(value); + } + + public static AddColumnFamilyState forNumber(int value) { switch (value) { case 1: return ADD_COLUMN_FAMILY_PREPARE; case 2: return ADD_COLUMN_FAMILY_PRE_OPERATION; @@ -873,17 +945,17 @@ public final class MasterProcedureProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + AddColumnFamilyState> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public AddColumnFamilyState findValueByNumber(int number) { - return AddColumnFamilyState.valueOf(number); + return AddColumnFamilyState.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -905,11 +977,9 @@ public final class MasterProcedureProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private AddColumnFamilyState(int index, int value) { - this.index = index; + private AddColumnFamilyState(int value) { this.value = value; } @@ -924,23 +994,23 @@ public final class MasterProcedureProtos { /** * MODIFY_COLUMN_FAMILY_PREPARE = 1; */ - MODIFY_COLUMN_FAMILY_PREPARE(0, 1), + MODIFY_COLUMN_FAMILY_PREPARE(1), /** * MODIFY_COLUMN_FAMILY_PRE_OPERATION = 2; */ - 
MODIFY_COLUMN_FAMILY_PRE_OPERATION(1, 2), + MODIFY_COLUMN_FAMILY_PRE_OPERATION(2), /** * MODIFY_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR = 3; */ - MODIFY_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR(2, 3), + MODIFY_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR(3), /** * MODIFY_COLUMN_FAMILY_POST_OPERATION = 4; */ - MODIFY_COLUMN_FAMILY_POST_OPERATION(3, 4), + MODIFY_COLUMN_FAMILY_POST_OPERATION(4), /** * MODIFY_COLUMN_FAMILY_REOPEN_ALL_REGIONS = 5; */ - MODIFY_COLUMN_FAMILY_REOPEN_ALL_REGIONS(4, 5), + MODIFY_COLUMN_FAMILY_REOPEN_ALL_REGIONS(5), ; /** @@ -965,9 +1035,19 @@ public final class MasterProcedureProtos { public static final int MODIFY_COLUMN_FAMILY_REOPEN_ALL_REGIONS_VALUE = 5; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static ModifyColumnFamilyState valueOf(int value) { + return forNumber(value); + } + + public static ModifyColumnFamilyState forNumber(int value) { switch (value) { case 1: return MODIFY_COLUMN_FAMILY_PREPARE; case 2: return MODIFY_COLUMN_FAMILY_PRE_OPERATION; @@ -982,17 +1062,17 @@ public final class MasterProcedureProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + ModifyColumnFamilyState> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public ModifyColumnFamilyState findValueByNumber(int number) { - return ModifyColumnFamilyState.valueOf(number); + return ModifyColumnFamilyState.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -1014,11 +1094,9 @@ public final class 
MasterProcedureProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private ModifyColumnFamilyState(int index, int value) { - this.index = index; + private ModifyColumnFamilyState(int value) { this.value = value; } @@ -1033,27 +1111,27 @@ public final class MasterProcedureProtos { /** * DELETE_COLUMN_FAMILY_PREPARE = 1; */ - DELETE_COLUMN_FAMILY_PREPARE(0, 1), + DELETE_COLUMN_FAMILY_PREPARE(1), /** * DELETE_COLUMN_FAMILY_PRE_OPERATION = 2; */ - DELETE_COLUMN_FAMILY_PRE_OPERATION(1, 2), + DELETE_COLUMN_FAMILY_PRE_OPERATION(2), /** * DELETE_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR = 3; */ - DELETE_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR(2, 3), + DELETE_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR(3), /** * DELETE_COLUMN_FAMILY_DELETE_FS_LAYOUT = 4; */ - DELETE_COLUMN_FAMILY_DELETE_FS_LAYOUT(3, 4), + DELETE_COLUMN_FAMILY_DELETE_FS_LAYOUT(4), /** * DELETE_COLUMN_FAMILY_POST_OPERATION = 5; */ - DELETE_COLUMN_FAMILY_POST_OPERATION(4, 5), + DELETE_COLUMN_FAMILY_POST_OPERATION(5), /** * DELETE_COLUMN_FAMILY_REOPEN_ALL_REGIONS = 6; */ - DELETE_COLUMN_FAMILY_REOPEN_ALL_REGIONS(5, 6), + DELETE_COLUMN_FAMILY_REOPEN_ALL_REGIONS(6), ; /** @@ -1082,9 +1160,19 @@ public final class MasterProcedureProtos { public static final int DELETE_COLUMN_FAMILY_REOPEN_ALL_REGIONS_VALUE = 6; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. 
+ */ + @java.lang.Deprecated public static DeleteColumnFamilyState valueOf(int value) { + return forNumber(value); + } + + public static DeleteColumnFamilyState forNumber(int value) { switch (value) { case 1: return DELETE_COLUMN_FAMILY_PREPARE; case 2: return DELETE_COLUMN_FAMILY_PRE_OPERATION; @@ -1100,17 +1188,17 @@ public final class MasterProcedureProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + DeleteColumnFamilyState> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public DeleteColumnFamilyState findValueByNumber(int number) { - return DeleteColumnFamilyState.valueOf(number); + return DeleteColumnFamilyState.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -1132,11 +1220,9 @@ public final class MasterProcedureProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private DeleteColumnFamilyState(int index, int value) { - this.index = index; + private DeleteColumnFamilyState(int value) { this.value = value; } @@ -1151,27 +1237,27 @@ public final class MasterProcedureProtos { /** * ENABLE_TABLE_PREPARE = 1; */ - ENABLE_TABLE_PREPARE(0, 1), + ENABLE_TABLE_PREPARE(1), /** * ENABLE_TABLE_PRE_OPERATION = 2; */ - ENABLE_TABLE_PRE_OPERATION(1, 2), + ENABLE_TABLE_PRE_OPERATION(2), /** * ENABLE_TABLE_SET_ENABLING_TABLE_STATE = 3; */ - ENABLE_TABLE_SET_ENABLING_TABLE_STATE(2, 3), + ENABLE_TABLE_SET_ENABLING_TABLE_STATE(3), /** * ENABLE_TABLE_MARK_REGIONS_ONLINE = 4; */ - ENABLE_TABLE_MARK_REGIONS_ONLINE(3, 4), + ENABLE_TABLE_MARK_REGIONS_ONLINE(4), /** * ENABLE_TABLE_SET_ENABLED_TABLE_STATE = 5; */ - 
ENABLE_TABLE_SET_ENABLED_TABLE_STATE(4, 5), + ENABLE_TABLE_SET_ENABLED_TABLE_STATE(5), /** * ENABLE_TABLE_POST_OPERATION = 6; */ - ENABLE_TABLE_POST_OPERATION(5, 6), + ENABLE_TABLE_POST_OPERATION(6), ; /** @@ -1200,9 +1286,19 @@ public final class MasterProcedureProtos { public static final int ENABLE_TABLE_POST_OPERATION_VALUE = 6; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static EnableTableState valueOf(int value) { + return forNumber(value); + } + + public static EnableTableState forNumber(int value) { switch (value) { case 1: return ENABLE_TABLE_PREPARE; case 2: return ENABLE_TABLE_PRE_OPERATION; @@ -1218,17 +1314,17 @@ public final class MasterProcedureProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + EnableTableState> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public EnableTableState findValueByNumber(int number) { - return EnableTableState.valueOf(number); + return EnableTableState.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -1250,11 +1346,9 @@ public final class MasterProcedureProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private EnableTableState(int index, int value) { - this.index = index; + private EnableTableState(int value) { this.value = value; } @@ -1269,27 +1363,27 @@ public final class MasterProcedureProtos { /** * DISABLE_TABLE_PREPARE = 1; */ - DISABLE_TABLE_PREPARE(0, 1), + DISABLE_TABLE_PREPARE(1), /** * 
DISABLE_TABLE_PRE_OPERATION = 2; */ - DISABLE_TABLE_PRE_OPERATION(1, 2), + DISABLE_TABLE_PRE_OPERATION(2), /** * DISABLE_TABLE_SET_DISABLING_TABLE_STATE = 3; */ - DISABLE_TABLE_SET_DISABLING_TABLE_STATE(2, 3), + DISABLE_TABLE_SET_DISABLING_TABLE_STATE(3), /** * DISABLE_TABLE_MARK_REGIONS_OFFLINE = 4; */ - DISABLE_TABLE_MARK_REGIONS_OFFLINE(3, 4), + DISABLE_TABLE_MARK_REGIONS_OFFLINE(4), /** * DISABLE_TABLE_SET_DISABLED_TABLE_STATE = 5; */ - DISABLE_TABLE_SET_DISABLED_TABLE_STATE(4, 5), + DISABLE_TABLE_SET_DISABLED_TABLE_STATE(5), /** * DISABLE_TABLE_POST_OPERATION = 6; */ - DISABLE_TABLE_POST_OPERATION(5, 6), + DISABLE_TABLE_POST_OPERATION(6), ; /** @@ -1318,9 +1412,19 @@ public final class MasterProcedureProtos { public static final int DISABLE_TABLE_POST_OPERATION_VALUE = 6; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static DisableTableState valueOf(int value) { + return forNumber(value); + } + + public static DisableTableState forNumber(int value) { switch (value) { case 1: return DISABLE_TABLE_PREPARE; case 2: return DISABLE_TABLE_PRE_OPERATION; @@ -1336,17 +1440,17 @@ public final class MasterProcedureProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + DisableTableState> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public DisableTableState findValueByNumber(int number) { - return DisableTableState.valueOf(number); + return DisableTableState.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { 
@@ -1368,11 +1472,9 @@ public final class MasterProcedureProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private DisableTableState(int index, int value) { - this.index = index; + private DisableTableState(int value) { this.value = value; } @@ -1387,27 +1489,27 @@ public final class MasterProcedureProtos { /** * CLONE_SNAPSHOT_PRE_OPERATION = 1; */ - CLONE_SNAPSHOT_PRE_OPERATION(0, 1), + CLONE_SNAPSHOT_PRE_OPERATION(1), /** * CLONE_SNAPSHOT_WRITE_FS_LAYOUT = 2; */ - CLONE_SNAPSHOT_WRITE_FS_LAYOUT(1, 2), + CLONE_SNAPSHOT_WRITE_FS_LAYOUT(2), /** * CLONE_SNAPSHOT_ADD_TO_META = 3; */ - CLONE_SNAPSHOT_ADD_TO_META(2, 3), + CLONE_SNAPSHOT_ADD_TO_META(3), /** * CLONE_SNAPSHOT_ASSIGN_REGIONS = 4; */ - CLONE_SNAPSHOT_ASSIGN_REGIONS(3, 4), + CLONE_SNAPSHOT_ASSIGN_REGIONS(4), /** * CLONE_SNAPSHOT_UPDATE_DESC_CACHE = 5; */ - CLONE_SNAPSHOT_UPDATE_DESC_CACHE(4, 5), + CLONE_SNAPSHOT_UPDATE_DESC_CACHE(5), /** * CLONE_SNAPSHOT_POST_OPERATION = 6; */ - CLONE_SNAPSHOT_POST_OPERATION(5, 6), + CLONE_SNAPSHOT_POST_OPERATION(6), ; /** @@ -1436,9 +1538,19 @@ public final class MasterProcedureProtos { public static final int CLONE_SNAPSHOT_POST_OPERATION_VALUE = 6; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. 
+ */ + @java.lang.Deprecated public static CloneSnapshotState valueOf(int value) { + return forNumber(value); + } + + public static CloneSnapshotState forNumber(int value) { switch (value) { case 1: return CLONE_SNAPSHOT_PRE_OPERATION; case 2: return CLONE_SNAPSHOT_WRITE_FS_LAYOUT; @@ -1454,17 +1566,17 @@ public final class MasterProcedureProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + CloneSnapshotState> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public CloneSnapshotState findValueByNumber(int number) { - return CloneSnapshotState.valueOf(number); + return CloneSnapshotState.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -1486,11 +1598,9 @@ public final class MasterProcedureProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private CloneSnapshotState(int index, int value) { - this.index = index; + private CloneSnapshotState(int value) { this.value = value; } @@ -1505,19 +1615,19 @@ public final class MasterProcedureProtos { /** * RESTORE_SNAPSHOT_PRE_OPERATION = 1; */ - RESTORE_SNAPSHOT_PRE_OPERATION(0, 1), + RESTORE_SNAPSHOT_PRE_OPERATION(1), /** * RESTORE_SNAPSHOT_UPDATE_TABLE_DESCRIPTOR = 2; */ - RESTORE_SNAPSHOT_UPDATE_TABLE_DESCRIPTOR(1, 2), + RESTORE_SNAPSHOT_UPDATE_TABLE_DESCRIPTOR(2), /** * RESTORE_SNAPSHOT_WRITE_FS_LAYOUT = 3; */ - RESTORE_SNAPSHOT_WRITE_FS_LAYOUT(2, 3), + RESTORE_SNAPSHOT_WRITE_FS_LAYOUT(3), /** * RESTORE_SNAPSHOT_UPDATE_META = 4; */ - RESTORE_SNAPSHOT_UPDATE_META(3, 4), + RESTORE_SNAPSHOT_UPDATE_META(4), ; /** @@ -1538,9 +1648,19 @@ public final class 
MasterProcedureProtos { public static final int RESTORE_SNAPSHOT_UPDATE_META_VALUE = 4; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static RestoreSnapshotState valueOf(int value) { + return forNumber(value); + } + + public static RestoreSnapshotState forNumber(int value) { switch (value) { case 1: return RESTORE_SNAPSHOT_PRE_OPERATION; case 2: return RESTORE_SNAPSHOT_UPDATE_TABLE_DESCRIPTOR; @@ -1554,17 +1674,17 @@ public final class MasterProcedureProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + RestoreSnapshotState> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public RestoreSnapshotState findValueByNumber(int number) { - return RestoreSnapshotState.valueOf(number); + return RestoreSnapshotState.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -1586,11 +1706,9 @@ public final class MasterProcedureProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private RestoreSnapshotState(int index, int value) { - this.index = index; + private RestoreSnapshotState(int value) { this.value = value; } @@ -1605,23 +1723,23 @@ public final class MasterProcedureProtos { /** * DISPATCH_MERGING_REGIONS_PREPARE = 1; */ - DISPATCH_MERGING_REGIONS_PREPARE(0, 1), + DISPATCH_MERGING_REGIONS_PREPARE(1), /** * DISPATCH_MERGING_REGIONS_PRE_OPERATION = 2; */ - DISPATCH_MERGING_REGIONS_PRE_OPERATION(1, 2), + DISPATCH_MERGING_REGIONS_PRE_OPERATION(2), /** * 
DISPATCH_MERGING_REGIONS_MOVE_REGION_TO_SAME_RS = 3; */ - DISPATCH_MERGING_REGIONS_MOVE_REGION_TO_SAME_RS(2, 3), + DISPATCH_MERGING_REGIONS_MOVE_REGION_TO_SAME_RS(3), /** * DISPATCH_MERGING_REGIONS_DO_MERGE_IN_RS = 4; */ - DISPATCH_MERGING_REGIONS_DO_MERGE_IN_RS(3, 4), + DISPATCH_MERGING_REGIONS_DO_MERGE_IN_RS(4), /** * DISPATCH_MERGING_REGIONS_POST_OPERATION = 5; */ - DISPATCH_MERGING_REGIONS_POST_OPERATION(4, 5), + DISPATCH_MERGING_REGIONS_POST_OPERATION(5), ; /** @@ -1646,9 +1764,19 @@ public final class MasterProcedureProtos { public static final int DISPATCH_MERGING_REGIONS_POST_OPERATION_VALUE = 5; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static DispatchMergingRegionsState valueOf(int value) { + return forNumber(value); + } + + public static DispatchMergingRegionsState forNumber(int value) { switch (value) { case 1: return DISPATCH_MERGING_REGIONS_PREPARE; case 2: return DISPATCH_MERGING_REGIONS_PRE_OPERATION; @@ -1663,17 +1791,17 @@ public final class MasterProcedureProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + DispatchMergingRegionsState> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public DispatchMergingRegionsState findValueByNumber(int number) { - return DispatchMergingRegionsState.valueOf(number); + return DispatchMergingRegionsState.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -1695,11 +1823,9 @@ public final class MasterProcedureProtos { return 
VALUES[desc.getIndex()]; } - private final int index; private final int value; - private DispatchMergingRegionsState(int index, int value) { - this.index = index; + private DispatchMergingRegionsState(int value) { this.value = value; } @@ -1714,43 +1840,43 @@ public final class MasterProcedureProtos { /** * SERVER_CRASH_START = 1; */ - SERVER_CRASH_START(0, 1), + SERVER_CRASH_START(1), /** * SERVER_CRASH_PROCESS_META = 2; */ - SERVER_CRASH_PROCESS_META(1, 2), + SERVER_CRASH_PROCESS_META(2), /** * SERVER_CRASH_GET_REGIONS = 3; */ - SERVER_CRASH_GET_REGIONS(2, 3), + SERVER_CRASH_GET_REGIONS(3), /** * SERVER_CRASH_NO_SPLIT_LOGS = 4; */ - SERVER_CRASH_NO_SPLIT_LOGS(3, 4), + SERVER_CRASH_NO_SPLIT_LOGS(4), /** * SERVER_CRASH_SPLIT_LOGS = 5; */ - SERVER_CRASH_SPLIT_LOGS(4, 5), + SERVER_CRASH_SPLIT_LOGS(5), /** * SERVER_CRASH_PREPARE_LOG_REPLAY = 6; */ - SERVER_CRASH_PREPARE_LOG_REPLAY(5, 6), + SERVER_CRASH_PREPARE_LOG_REPLAY(6), /** - * SERVER_CRASH_ASSIGN = 8; - * *
      * Removed SERVER_CRASH_CALC_REGIONS_TO_ASSIGN = 7;
      * 
+ * + * SERVER_CRASH_ASSIGN = 8; */ - SERVER_CRASH_ASSIGN(6, 8), + SERVER_CRASH_ASSIGN(8), /** * SERVER_CRASH_WAIT_ON_ASSIGN = 9; */ - SERVER_CRASH_WAIT_ON_ASSIGN(7, 9), + SERVER_CRASH_WAIT_ON_ASSIGN(9), /** * SERVER_CRASH_FINISH = 100; */ - SERVER_CRASH_FINISH(8, 100), + SERVER_CRASH_FINISH(100), ; /** @@ -1778,11 +1904,11 @@ public final class MasterProcedureProtos { */ public static final int SERVER_CRASH_PREPARE_LOG_REPLAY_VALUE = 6; /** - * SERVER_CRASH_ASSIGN = 8; - * *
      * Removed SERVER_CRASH_CALC_REGIONS_TO_ASSIGN = 7;
      * 
+ * + * SERVER_CRASH_ASSIGN = 8; */ public static final int SERVER_CRASH_ASSIGN_VALUE = 8; /** @@ -1795,9 +1921,19 @@ public final class MasterProcedureProtos { public static final int SERVER_CRASH_FINISH_VALUE = 100; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static ServerCrashState valueOf(int value) { + return forNumber(value); + } + + public static ServerCrashState forNumber(int value) { switch (value) { case 1: return SERVER_CRASH_START; case 2: return SERVER_CRASH_PROCESS_META; @@ -1816,17 +1952,17 @@ public final class MasterProcedureProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + ServerCrashState> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public ServerCrashState findValueByNumber(int number) { - return ServerCrashState.valueOf(number); + return ServerCrashState.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -1848,21 +1984,19 @@ public final class MasterProcedureProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private ServerCrashState(int index, int value) { - this.index = index; + private ServerCrashState(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.ServerCrashState) } - public interface CreateTableStateDataOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface CreateTableStateDataOrBuilder extends + // 
@@protoc_insertion_point(interface_extends:hbase.pb.CreateTableStateData) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.UserInformation user_info = 1; /** * required .hbase.pb.UserInformation user_info = 1; */ @@ -1876,7 +2010,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder(); - // required .hbase.pb.TableSchema table_schema = 2; /** * required .hbase.pb.TableSchema table_schema = 2; */ @@ -1890,7 +2023,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder(); - // repeated .hbase.pb.RegionInfo region_info = 3; /** * repeated .hbase.pb.RegionInfo region_info = 3; */ @@ -1918,36 +2050,28 @@ public final class MasterProcedureProtos { /** * Protobuf type {@code hbase.pb.CreateTableStateData} */ - public static final class CreateTableStateData extends - com.google.protobuf.GeneratedMessage - implements CreateTableStateDataOrBuilder { + public static final class CreateTableStateData extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.CreateTableStateData) + CreateTableStateDataOrBuilder { // Use CreateTableStateData.newBuilder() to construct. 
- private CreateTableStateData(com.google.protobuf.GeneratedMessage.Builder builder) { + private CreateTableStateData(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private CreateTableStateData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final CreateTableStateData defaultInstance; - public static CreateTableStateData getDefaultInstance() { - return defaultInstance; } - - public CreateTableStateData getDefaultInstanceForType() { - return defaultInstance; + private CreateTableStateData() { + regionInfo_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private CreateTableStateData( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -1997,7 +2121,8 @@ public final class MasterProcedureProtos { regionInfo_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000004; } - regionInfo_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry)); + regionInfo_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry)); break; } } @@ -2006,7 +2131,7 @@ public final class MasterProcedureProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if 
(((mutable_bitField0_ & 0x00000004) == 0x00000004)) { regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_); @@ -2020,30 +2145,14 @@ public final class MasterProcedureProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_CreateTableStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_CreateTableStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateTableStateData.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateTableStateData.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CreateTableStateData parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CreateTableStateData(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.UserInformation user_info = 1; public static final int USER_INFO_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_; /** @@ -2056,16 +2165,15 @@ public final class MasterProcedureProtos { * required .hbase.pb.UserInformation user_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() { - return userInfo_; + return userInfo_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } /** * required .hbase.pb.UserInformation user_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() { - return userInfo_; + return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } - // required .hbase.pb.TableSchema table_schema = 2; public static final int TABLE_SCHEMA_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema tableSchema_; /** @@ -2078,16 +2186,15 @@ public final class MasterProcedureProtos { * required .hbase.pb.TableSchema table_schema = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { - return tableSchema_; + return tableSchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : tableSchema_; } /** * required .hbase.pb.TableSchema table_schema = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() { - return tableSchema_; + return tableSchema_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : tableSchema_; } - // repeated .hbase.pb.RegionInfo region_info = 3; public static final int REGION_INFO_FIELD_NUMBER = 3; private java.util.List regionInfo_; /** @@ -2123,15 +2230,11 @@ public final class MasterProcedureProtos { return regionInfo_.get(index); } - private void initFields() { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - tableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - regionInfo_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasUserInfo()) { memoizedIsInitialized = 0; @@ -2161,50 +2264,42 @@ public final class MasterProcedureProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, userInfo_); + output.writeMessage(1, getUserInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, tableSchema_); + output.writeMessage(2, getTableSchema()); } for (int i = 0; i < regionInfo_.size(); i++) { output.writeMessage(3, regionInfo_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, userInfo_); + .computeMessageSize(1, getUserInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size 
+= com.google.protobuf.CodedOutputStream - .computeMessageSize(2, tableSchema_); + .computeMessageSize(2, getTableSchema()); } for (int i = 0; i < regionInfo_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(3, regionInfo_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -2227,12 +2322,10 @@ public final class MasterProcedureProtos { } result = result && getRegionInfoList() .equals(other.getRegionInfoList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -2252,7 +2345,7 @@ public final class MasterProcedureProtos { hash = (37 * hash) + REGION_INFO_FIELD_NUMBER; hash = (53 * hash) + getRegionInfoList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -2280,46 +2373,57 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateTableStateData parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateTableStateData parseFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateTableStateData parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateTableStateData parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateTableStateData parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateTableStateData parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder 
newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateTableStateData prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -2327,14 +2431,15 @@ public final class MasterProcedureProtos { * Protobuf type {@code hbase.pb.CreateTableStateData} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateTableStateDataOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.CreateTableStateData) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateTableStateDataOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_CreateTableStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_CreateTableStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -2347,31 +2452,28 @@ public final class MasterProcedureProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent 
parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getUserInfoFieldBuilder(); getTableSchemaFieldBuilder(); getRegionInfoFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (userInfoBuilder_ == null) { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + userInfo_ = null; } else { userInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (tableSchemaBuilder_ == null) { - tableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + tableSchema_ = null; } else { tableSchemaBuilder_.clear(); } @@ -2385,10 +2487,6 @@ public final class MasterProcedureProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_CreateTableStateData_descriptor; @@ -2440,6 +2538,32 @@ public final class MasterProcedureProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) 
super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateTableStateData) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateTableStateData)other); @@ -2476,37 +2600,33 @@ public final class MasterProcedureProtos { regionInfo_ = other.regionInfo_; bitField0_ = (bitField0_ & ~0x00000004); regionInfoBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getRegionInfoFieldBuilder() : null; } else { regionInfoBuilder_.addAllMessages(other.regionInfo_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasUserInfo()) { - return false; } if (!hasTableSchema()) { - return false; } if (!getUserInfo().isInitialized()) { - return false; } if (!getTableSchema().isInitialized()) { - return false; } for (int i = 0; i < getRegionInfoCount(); i++) { if (!getRegionInfo(i).isInitialized()) { - return false; } } @@ -2522,7 +2642,7 @@ public final class MasterProcedureProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateTableStateData) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -2532,9 +2652,8 @@ public final class MasterProcedureProtos { } private int bitField0_; - // required .hbase.pb.UserInformation user_info = 1; - private 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> userInfoBuilder_; /** * required .hbase.pb.UserInformation user_info = 1; @@ -2547,7 +2666,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() { if (userInfoBuilder_ == null) { - return userInfo_; + return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } else { return userInfoBuilder_.getMessage(); } @@ -2588,6 +2707,7 @@ public final class MasterProcedureProtos { public Builder mergeUserInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation value) { if (userInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + userInfo_ != null && userInfo_ != org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance()) { userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.newBuilder(userInfo_).mergeFrom(value).buildPartial(); @@ -2606,7 +2726,7 @@ public final class MasterProcedureProtos { */ public Builder clearUserInfo() { if (userInfoBuilder_ == null) { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + userInfo_ = null; onChanged(); } else { userInfoBuilder_.clear(); @@ -2629,19 +2749,20 @@ 
public final class MasterProcedureProtos { if (userInfoBuilder_ != null) { return userInfoBuilder_.getMessageOrBuilder(); } else { - return userInfo_; + return userInfo_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } } /** * required .hbase.pb.UserInformation user_info = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> getUserInfoFieldBuilder() { if (userInfoBuilder_ == null) { - userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder>( - userInfo_, + getUserInfo(), getParentForChildren(), isClean()); userInfo_ = null; @@ -2649,9 +2770,8 @@ public final class MasterProcedureProtos { return userInfoBuilder_; } - // required .hbase.pb.TableSchema table_schema = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema tableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema tableSchema_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_; /** * required .hbase.pb.TableSchema table_schema = 2; @@ -2664,7 +2784,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { if (tableSchemaBuilder_ == null) { - return tableSchema_; + return tableSchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : tableSchema_; } else { return tableSchemaBuilder_.getMessage(); } @@ -2705,6 +2825,7 @@ public final class MasterProcedureProtos { public Builder mergeTableSchema(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema value) { if (tableSchemaBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + tableSchema_ != null && tableSchema_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) { tableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.newBuilder(tableSchema_).mergeFrom(value).buildPartial(); @@ -2723,7 +2844,7 @@ public final class MasterProcedureProtos { */ public Builder clearTableSchema() { if (tableSchemaBuilder_ == null) { - tableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + tableSchema_ = null; onChanged(); } else { tableSchemaBuilder_.clear(); @@ -2746,19 +2867,20 @@ public final class MasterProcedureProtos { if (tableSchemaBuilder_ != null) { return tableSchemaBuilder_.getMessageOrBuilder(); } else { - return tableSchema_; + return tableSchema_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : tableSchema_; } } /** * required .hbase.pb.TableSchema table_schema = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> getTableSchemaFieldBuilder() { if (tableSchemaBuilder_ == null) { - tableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>( - tableSchema_, + getTableSchema(), getParentForChildren(), isClean()); tableSchema_ = null; @@ -2766,7 +2888,6 @@ public final class MasterProcedureProtos { return tableSchemaBuilder_; } - // repeated .hbase.pb.RegionInfo region_info = 3; private java.util.List regionInfo_ = java.util.Collections.emptyList(); private void ensureRegionInfoIsMutable() { @@ -2776,7 +2897,7 @@ public final class MasterProcedureProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_; /** @@ -2908,7 +3029,8 @@ public final class MasterProcedureProtos { java.lang.Iterable values) { if (regionInfoBuilder_ == null) { ensureRegionInfoIsMutable(); - super.addAll(values, regionInfo_); + 
com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, regionInfo_); onChanged(); } else { regionInfoBuilder_.addAllMessages(values); @@ -2991,11 +3113,11 @@ public final class MasterProcedureProtos { getRegionInfoBuilderList() { return getRegionInfoFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> getRegionInfoFieldBuilder() { if (regionInfoBuilder_ == null) { - regionInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + regionInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( regionInfo_, ((bitField0_ & 0x00000004) == 0x00000004), @@ -3005,22 +3127,59 @@ public final class MasterProcedureProtos { } return regionInfoBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.CreateTableStateData) } + // @@protoc_insertion_point(class_scope:hbase.pb.CreateTableStateData) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateTableStateData DEFAULT_INSTANCE; static { - defaultInstance = new CreateTableStateData(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateTableStateData(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateTableStateData getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public CreateTableStateData parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CreateTableStateData(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateTableStateData getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.CreateTableStateData) } - public interface ModifyTableStateDataOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ModifyTableStateDataOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ModifyTableStateData) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.UserInformation user_info = 1; /** * required .hbase.pb.UserInformation user_info = 1; */ @@ -3034,7 +3193,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder(); - // optional .hbase.pb.TableSchema unmodified_table_schema = 2; /** * optional .hbase.pb.TableSchema unmodified_table_schema = 2; */ @@ -3048,7 +3206,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getUnmodifiedTableSchemaOrBuilder(); - // required 
.hbase.pb.TableSchema modified_table_schema = 3; /** * required .hbase.pb.TableSchema modified_table_schema = 3; */ @@ -3062,7 +3219,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getModifiedTableSchemaOrBuilder(); - // required bool delete_column_family_in_modify = 4; /** * required bool delete_column_family_in_modify = 4; */ @@ -3075,36 +3231,28 @@ public final class MasterProcedureProtos { /** * Protobuf type {@code hbase.pb.ModifyTableStateData} */ - public static final class ModifyTableStateData extends - com.google.protobuf.GeneratedMessage - implements ModifyTableStateDataOrBuilder { + public static final class ModifyTableStateData extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ModifyTableStateData) + ModifyTableStateDataOrBuilder { // Use ModifyTableStateData.newBuilder() to construct. - private ModifyTableStateData(com.google.protobuf.GeneratedMessage.Builder builder) { + private ModifyTableStateData(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private ModifyTableStateData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ModifyTableStateData defaultInstance; - public static ModifyTableStateData getDefaultInstance() { - return defaultInstance; - } - - public ModifyTableStateData getDefaultInstanceForType() { - return defaultInstance; + private ModifyTableStateData() { + deleteColumnFamilyInModify_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ModifyTableStateData( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -3173,7 +3321,7 @@ public final class MasterProcedureProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -3184,30 +3332,14 @@ public final class MasterProcedureProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_ModifyTableStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_ModifyTableStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyTableStateData.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyTableStateData.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ModifyTableStateData parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ModifyTableStateData(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.UserInformation user_info = 1; public static final int USER_INFO_FIELD_NUMBER = 1; private 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_; /** @@ -3220,16 +3352,15 @@ public final class MasterProcedureProtos { * required .hbase.pb.UserInformation user_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() { - return userInfo_; + return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } /** * required .hbase.pb.UserInformation user_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() { - return userInfo_; + return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } - // optional .hbase.pb.TableSchema unmodified_table_schema = 2; public static final int UNMODIFIED_TABLE_SCHEMA_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema unmodifiedTableSchema_; /** @@ -3242,16 +3373,15 @@ public final class MasterProcedureProtos { * optional .hbase.pb.TableSchema unmodified_table_schema = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getUnmodifiedTableSchema() { - return unmodifiedTableSchema_; + return unmodifiedTableSchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : unmodifiedTableSchema_; } /** * optional .hbase.pb.TableSchema unmodified_table_schema = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getUnmodifiedTableSchemaOrBuilder() { - return unmodifiedTableSchema_; + return unmodifiedTableSchema_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : unmodifiedTableSchema_; } - // required .hbase.pb.TableSchema modified_table_schema = 3; public static final int MODIFIED_TABLE_SCHEMA_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema modifiedTableSchema_; /** @@ -3264,16 +3394,15 @@ public final class MasterProcedureProtos { * required .hbase.pb.TableSchema modified_table_schema = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getModifiedTableSchema() { - return modifiedTableSchema_; + return modifiedTableSchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : modifiedTableSchema_; } /** * required .hbase.pb.TableSchema modified_table_schema = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getModifiedTableSchemaOrBuilder() { - return modifiedTableSchema_; + return modifiedTableSchema_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : modifiedTableSchema_; } - // required bool delete_column_family_in_modify = 4; public static final int DELETE_COLUMN_FAMILY_IN_MODIFY_FIELD_NUMBER = 4; private boolean deleteColumnFamilyInModify_; /** @@ -3289,16 +3418,11 @@ public final class MasterProcedureProtos { return deleteColumnFamilyInModify_; } - private void initFields() { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - unmodifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - modifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - deleteColumnFamilyInModify_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasUserInfo()) { memoizedIsInitialized = 0; @@ -3332,57 +3456,49 @@ public final class MasterProcedureProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, userInfo_); + output.writeMessage(1, getUserInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, unmodifiedTableSchema_); + output.writeMessage(2, getUnmodifiedTableSchema()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeMessage(3, modifiedTableSchema_); + output.writeMessage(3, getModifiedTableSchema()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeBool(4, deleteColumnFamilyInModify_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int 
getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, userInfo_); + .computeMessageSize(1, getUserInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, unmodifiedTableSchema_); + .computeMessageSize(2, getUnmodifiedTableSchema()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, modifiedTableSchema_); + .computeMessageSize(3, getModifiedTableSchema()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(4, deleteColumnFamilyInModify_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -3413,12 +3529,10 @@ public final class MasterProcedureProtos { result = result && (getDeleteColumnFamilyInModify() == other.getDeleteColumnFamilyInModify()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -3440,9 +3554,10 @@ public final class MasterProcedureProtos { } if (hasDeleteColumnFamilyInModify()) { hash = (37 * hash) + DELETE_COLUMN_FAMILY_IN_MODIFY_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getDeleteColumnFamilyInModify()); + hash = (53 * 
hash) + com.google.protobuf.Internal.hashBoolean( + getDeleteColumnFamilyInModify()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -3470,46 +3585,57 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyTableStateData parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyTableStateData parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyTableStateData parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyTableStateData parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyTableStateData parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyTableStateData parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyTableStateData prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -3517,14 +3643,15 @@ public final class MasterProcedureProtos { * Protobuf type {@code hbase.pb.ModifyTableStateData} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyTableStateDataOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ModifyTableStateData) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyTableStateDataOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_ModifyTableStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_ModifyTableStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -3537,37 +3664,34 @@ public final class MasterProcedureProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getUserInfoFieldBuilder(); 
getUnmodifiedTableSchemaFieldBuilder(); getModifiedTableSchemaFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (userInfoBuilder_ == null) { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + userInfo_ = null; } else { userInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (unmodifiedTableSchemaBuilder_ == null) { - unmodifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + unmodifiedTableSchema_ = null; } else { unmodifiedTableSchemaBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); if (modifiedTableSchemaBuilder_ == null) { - modifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + modifiedTableSchema_ = null; } else { modifiedTableSchemaBuilder_.clear(); } @@ -3577,10 +3701,6 @@ public final class MasterProcedureProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_ModifyTableStateData_descriptor; @@ -3635,6 +3755,32 @@ public final class MasterProcedureProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor 
field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyTableStateData) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyTableStateData)other); @@ -3658,35 +3804,30 @@ public final class MasterProcedureProtos { if (other.hasDeleteColumnFamilyInModify()) { setDeleteColumnFamilyInModify(other.getDeleteColumnFamilyInModify()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasUserInfo()) { - return false; } if (!hasModifiedTableSchema()) { - return false; } if (!hasDeleteColumnFamilyInModify()) { - return false; } if (!getUserInfo().isInitialized()) { - return false; } if (hasUnmodifiedTableSchema()) { if (!getUnmodifiedTableSchema().isInitialized()) { - return false; } } if (!getModifiedTableSchema().isInitialized()) { - return false; } return true; @@ -3701,7 +3842,7 @@ public final class MasterProcedureProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyTableStateData) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -3711,9 +3852,8 @@ public final class MasterProcedureProtos { } private int bitField0_; - // required .hbase.pb.UserInformation user_info = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation 
userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> userInfoBuilder_; /** * required .hbase.pb.UserInformation user_info = 1; @@ -3726,7 +3866,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() { if (userInfoBuilder_ == null) { - return userInfo_; + return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } else { return userInfoBuilder_.getMessage(); } @@ -3767,6 +3907,7 @@ public final class MasterProcedureProtos { public Builder mergeUserInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation value) { if (userInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + userInfo_ != null && userInfo_ != org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance()) { userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.newBuilder(userInfo_).mergeFrom(value).buildPartial(); @@ -3785,7 +3926,7 @@ public final class MasterProcedureProtos { */ public Builder clearUserInfo() { if (userInfoBuilder_ == null) { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + userInfo_ = null; onChanged(); } else { userInfoBuilder_.clear(); @@ -3808,19 +3949,20 @@ public final class MasterProcedureProtos { if (userInfoBuilder_ != null) { 
return userInfoBuilder_.getMessageOrBuilder(); } else { - return userInfo_; + return userInfo_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } } /** * required .hbase.pb.UserInformation user_info = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> getUserInfoFieldBuilder() { if (userInfoBuilder_ == null) { - userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder>( - userInfo_, + getUserInfo(), getParentForChildren(), isClean()); userInfo_ = null; @@ -3828,9 +3970,8 @@ public final class MasterProcedureProtos { return userInfoBuilder_; } - // optional .hbase.pb.TableSchema unmodified_table_schema = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema unmodifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema unmodifiedTableSchema_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> 
unmodifiedTableSchemaBuilder_; /** * optional .hbase.pb.TableSchema unmodified_table_schema = 2; @@ -3843,7 +3984,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getUnmodifiedTableSchema() { if (unmodifiedTableSchemaBuilder_ == null) { - return unmodifiedTableSchema_; + return unmodifiedTableSchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : unmodifiedTableSchema_; } else { return unmodifiedTableSchemaBuilder_.getMessage(); } @@ -3884,6 +4025,7 @@ public final class MasterProcedureProtos { public Builder mergeUnmodifiedTableSchema(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema value) { if (unmodifiedTableSchemaBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + unmodifiedTableSchema_ != null && unmodifiedTableSchema_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) { unmodifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.newBuilder(unmodifiedTableSchema_).mergeFrom(value).buildPartial(); @@ -3902,7 +4044,7 @@ public final class MasterProcedureProtos { */ public Builder clearUnmodifiedTableSchema() { if (unmodifiedTableSchemaBuilder_ == null) { - unmodifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + unmodifiedTableSchema_ = null; onChanged(); } else { unmodifiedTableSchemaBuilder_.clear(); @@ -3925,19 +4067,20 @@ public final class MasterProcedureProtos { if (unmodifiedTableSchemaBuilder_ != null) { return unmodifiedTableSchemaBuilder_.getMessageOrBuilder(); } else { - return unmodifiedTableSchema_; + return unmodifiedTableSchema_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : unmodifiedTableSchema_; } } /** * optional .hbase.pb.TableSchema unmodified_table_schema = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> getUnmodifiedTableSchemaFieldBuilder() { if (unmodifiedTableSchemaBuilder_ == null) { - unmodifiedTableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder< + unmodifiedTableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>( - unmodifiedTableSchema_, + getUnmodifiedTableSchema(), getParentForChildren(), isClean()); unmodifiedTableSchema_ = null; @@ -3945,9 +4088,8 @@ public final class MasterProcedureProtos { return unmodifiedTableSchemaBuilder_; } - // required .hbase.pb.TableSchema modified_table_schema = 3; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema modifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema modifiedTableSchema_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> 
modifiedTableSchemaBuilder_; /** * required .hbase.pb.TableSchema modified_table_schema = 3; @@ -3960,7 +4102,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getModifiedTableSchema() { if (modifiedTableSchemaBuilder_ == null) { - return modifiedTableSchema_; + return modifiedTableSchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : modifiedTableSchema_; } else { return modifiedTableSchemaBuilder_.getMessage(); } @@ -4001,6 +4143,7 @@ public final class MasterProcedureProtos { public Builder mergeModifiedTableSchema(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema value) { if (modifiedTableSchemaBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004) && + modifiedTableSchema_ != null && modifiedTableSchema_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) { modifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.newBuilder(modifiedTableSchema_).mergeFrom(value).buildPartial(); @@ -4019,7 +4162,7 @@ public final class MasterProcedureProtos { */ public Builder clearModifiedTableSchema() { if (modifiedTableSchemaBuilder_ == null) { - modifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + modifiedTableSchema_ = null; onChanged(); } else { modifiedTableSchemaBuilder_.clear(); @@ -4042,19 +4185,20 @@ public final class MasterProcedureProtos { if (modifiedTableSchemaBuilder_ != null) { return modifiedTableSchemaBuilder_.getMessageOrBuilder(); } else { - return modifiedTableSchema_; + return modifiedTableSchema_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : modifiedTableSchema_; } } /** * required .hbase.pb.TableSchema modified_table_schema = 3; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> getModifiedTableSchemaFieldBuilder() { if (modifiedTableSchemaBuilder_ == null) { - modifiedTableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder< + modifiedTableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>( - modifiedTableSchema_, + getModifiedTableSchema(), getParentForChildren(), isClean()); modifiedTableSchema_ = null; @@ -4062,7 +4206,6 @@ public final class MasterProcedureProtos { return modifiedTableSchemaBuilder_; } - // required bool delete_column_family_in_modify = 4; private boolean deleteColumnFamilyInModify_ ; /** * required bool delete_column_family_in_modify = 4; @@ -4094,22 +4237,59 @@ public final class MasterProcedureProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ModifyTableStateData) } + // @@protoc_insertion_point(class_scope:hbase.pb.ModifyTableStateData) + private static final 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyTableStateData DEFAULT_INSTANCE; static { - defaultInstance = new ModifyTableStateData(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyTableStateData(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyTableStateData getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ModifyTableStateData parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ModifyTableStateData(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyTableStateData getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ModifyTableStateData) } - public interface TruncateTableStateDataOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface TruncateTableStateDataOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.TruncateTableStateData) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.UserInformation user_info = 1; /** * required .hbase.pb.UserInformation user_info = 1; */ @@ -4123,7 +4303,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder(); - // required bool preserve_splits = 2; /** * required bool preserve_splits = 2; */ @@ -4133,7 
+4312,6 @@ public final class MasterProcedureProtos { */ boolean getPreserveSplits(); - // optional .hbase.pb.TableName table_name = 3; /** * optional .hbase.pb.TableName table_name = 3; */ @@ -4147,7 +4325,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); - // optional .hbase.pb.TableSchema table_schema = 4; /** * optional .hbase.pb.TableSchema table_schema = 4; */ @@ -4161,7 +4338,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder(); - // repeated .hbase.pb.RegionInfo region_info = 5; /** * repeated .hbase.pb.RegionInfo region_info = 5; */ @@ -4189,36 +4365,29 @@ public final class MasterProcedureProtos { /** * Protobuf type {@code hbase.pb.TruncateTableStateData} */ - public static final class TruncateTableStateData extends - com.google.protobuf.GeneratedMessage - implements TruncateTableStateDataOrBuilder { + public static final class TruncateTableStateData extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.TruncateTableStateData) + TruncateTableStateDataOrBuilder { // Use TruncateTableStateData.newBuilder() to construct. 
- private TruncateTableStateData(com.google.protobuf.GeneratedMessage.Builder builder) { + private TruncateTableStateData(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private TruncateTableStateData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final TruncateTableStateData defaultInstance; - public static TruncateTableStateData getDefaultInstance() { - return defaultInstance; } - - public TruncateTableStateData getDefaultInstanceForType() { - return defaultInstance; + private TruncateTableStateData() { + preserveSplits_ = false; + regionInfo_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private TruncateTableStateData( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -4286,7 +4455,8 @@ public final class MasterProcedureProtos { regionInfo_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000010; } - regionInfo_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry)); + regionInfo_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry)); break; } } @@ -4295,7 +4465,7 @@ public final class MasterProcedureProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + 
e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) { regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_); @@ -4309,30 +4479,14 @@ public final class MasterProcedureProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_TruncateTableStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_TruncateTableStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.TruncateTableStateData.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.TruncateTableStateData.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public TruncateTableStateData parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new TruncateTableStateData(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.UserInformation user_info = 1; public static final int USER_INFO_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_; /** @@ -4345,16 +4499,15 @@ public final class MasterProcedureProtos { * required .hbase.pb.UserInformation user_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() { - return userInfo_; + return userInfo_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } /** * required .hbase.pb.UserInformation user_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() { - return userInfo_; + return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } - // required bool preserve_splits = 2; public static final int PRESERVE_SPLITS_FIELD_NUMBER = 2; private boolean preserveSplits_; /** @@ -4370,7 +4523,6 @@ public final class MasterProcedureProtos { return preserveSplits_; } - // optional .hbase.pb.TableName table_name = 3; public static final int TABLE_NAME_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** @@ -4383,16 +4535,15 @@ public final class MasterProcedureProtos { * optional .hbase.pb.TableName table_name = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * optional .hbase.pb.TableName table_name = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { - return tableName_; + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } - // optional .hbase.pb.TableSchema table_schema = 4; public static final int TABLE_SCHEMA_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema tableSchema_; /** @@ -4405,16 +4556,15 @@ public final class MasterProcedureProtos { * optional .hbase.pb.TableSchema table_schema = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { - return tableSchema_; + return tableSchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : tableSchema_; } /** * optional .hbase.pb.TableSchema table_schema = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() { - return tableSchema_; + return tableSchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : tableSchema_; } - // repeated .hbase.pb.RegionInfo region_info = 5; public static final int REGION_INFO_FIELD_NUMBER = 5; private java.util.List regionInfo_; /** @@ -4450,17 +4600,11 @@ public final class MasterProcedureProtos { return regionInfo_.get(index); } - private void initFields() { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - preserveSplits_ = false; - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - tableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - regionInfo_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; 
if (!hasUserInfo()) { memoizedIsInitialized = 0; @@ -4498,34 +4642,32 @@ public final class MasterProcedureProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, userInfo_); + output.writeMessage(1, getUserInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBool(2, preserveSplits_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeMessage(3, tableName_); + output.writeMessage(3, getTableName()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeMessage(4, tableSchema_); + output.writeMessage(4, getTableSchema()); } for (int i = 0; i < regionInfo_.size(); i++) { output.writeMessage(5, regionInfo_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, userInfo_); + .computeMessageSize(1, getUserInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream @@ -4533,29 +4675,23 @@ public final class MasterProcedureProtos { } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, tableName_); + .computeMessageSize(3, getTableName()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(4, tableSchema_); + .computeMessageSize(4, getTableSchema()); } for (int i = 0; i < regionInfo_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(5, regionInfo_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += 
unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -4588,12 +4724,10 @@ public final class MasterProcedureProtos { } result = result && getRegionInfoList() .equals(other.getRegionInfoList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -4607,7 +4741,8 @@ public final class MasterProcedureProtos { } if (hasPreserveSplits()) { hash = (37 * hash) + PRESERVE_SPLITS_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getPreserveSplits()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getPreserveSplits()); } if (hasTableName()) { hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; @@ -4621,7 +4756,7 @@ public final class MasterProcedureProtos { hash = (37 * hash) + REGION_INFO_FIELD_NUMBER; hash = (53 * hash) + getRegionInfoList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -4649,46 +4784,57 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.TruncateTableStateData parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.TruncateTableStateData parseFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.TruncateTableStateData parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.TruncateTableStateData parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.TruncateTableStateData parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.TruncateTableStateData parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder 
newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.TruncateTableStateData prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -4696,14 +4842,15 @@ public final class MasterProcedureProtos { * Protobuf type {@code hbase.pb.TruncateTableStateData} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.TruncateTableStateDataOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.TruncateTableStateData) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.TruncateTableStateDataOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_TruncateTableStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_TruncateTableStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -4716,26 +4863,23 @@ public final class MasterProcedureProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + 
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getUserInfoFieldBuilder(); getTableNameFieldBuilder(); getTableSchemaFieldBuilder(); getRegionInfoFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (userInfoBuilder_ == null) { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + userInfo_ = null; } else { userInfoBuilder_.clear(); } @@ -4743,13 +4887,13 @@ public final class MasterProcedureProtos { preserveSplits_ = false; bitField0_ = (bitField0_ & ~0x00000002); if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); if (tableSchemaBuilder_ == null) { - tableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + tableSchema_ = null; } else { tableSchemaBuilder_.clear(); } @@ -4763,10 +4907,6 @@ public final class MasterProcedureProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_TruncateTableStateData_descriptor; @@ -4830,6 +4970,32 @@ public final class MasterProcedureProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + 
public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.TruncateTableStateData) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.TruncateTableStateData)other); @@ -4872,45 +5038,40 @@ public final class MasterProcedureProtos { regionInfo_ = other.regionInfo_; bitField0_ = (bitField0_ & ~0x00000010); regionInfoBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getRegionInfoFieldBuilder() : null; } else { regionInfoBuilder_.addAllMessages(other.regionInfo_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasUserInfo()) { - return false; } if (!hasPreserveSplits()) { - return false; } if (!getUserInfo().isInitialized()) { - return false; } if (hasTableName()) { if (!getTableName().isInitialized()) { - return false; } } if (hasTableSchema()) { if (!getTableSchema().isInitialized()) { - return false; } } for (int i = 0; i < getRegionInfoCount(); i++) { if (!getRegionInfo(i).isInitialized()) { - return false; } } @@ -4926,7 +5087,7 @@ public final class MasterProcedureProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.TruncateTableStateData) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -4936,9 +5097,8 @@ public final class MasterProcedureProtos { } private int bitField0_; - // required .hbase.pb.UserInformation user_info = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> userInfoBuilder_; /** * required .hbase.pb.UserInformation 
user_info = 1; @@ -4951,7 +5111,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() { if (userInfoBuilder_ == null) { - return userInfo_; + return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } else { return userInfoBuilder_.getMessage(); } @@ -4992,6 +5152,7 @@ public final class MasterProcedureProtos { public Builder mergeUserInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation value) { if (userInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + userInfo_ != null && userInfo_ != org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance()) { userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.newBuilder(userInfo_).mergeFrom(value).buildPartial(); @@ -5010,7 +5171,7 @@ public final class MasterProcedureProtos { */ public Builder clearUserInfo() { if (userInfoBuilder_ == null) { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + userInfo_ = null; onChanged(); } else { userInfoBuilder_.clear(); @@ -5033,19 +5194,20 @@ public final class MasterProcedureProtos { if (userInfoBuilder_ != null) { return userInfoBuilder_.getMessageOrBuilder(); } else { - return userInfo_; + return userInfo_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } } /** * required .hbase.pb.UserInformation user_info = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> getUserInfoFieldBuilder() { if (userInfoBuilder_ == null) { - userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder>( - userInfo_, + getUserInfo(), getParentForChildren(), isClean()); userInfo_ = null; @@ -5053,7 +5215,6 @@ public final class MasterProcedureProtos { return userInfoBuilder_; } - // required bool preserve_splits = 2; private boolean preserveSplits_ ; /** * required bool preserve_splits = 2; @@ -5086,9 +5247,8 @@ public final class MasterProcedureProtos { return this; } - // optional .hbase.pb.TableName table_name = 3; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * optional .hbase.pb.TableName table_name = 3; @@ -5101,7 +5261,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } @@ -5142,6 +5302,7 @@ public final class MasterProcedureProtos { public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004) && + tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); @@ -5160,7 +5321,7 @@ public final class MasterProcedureProtos { */ public Builder clearTableName() { if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); @@ -5183,19 +5344,20 @@ public final class MasterProcedureProtos { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { - return tableName_; + return tableName_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * optional .hbase.pb.TableName table_name = 3; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { - tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( - tableName_, + getTableName(), getParentForChildren(), isClean()); tableName_ = null; @@ -5203,9 +5365,8 @@ public final class MasterProcedureProtos { return tableNameBuilder_; } - // optional .hbase.pb.TableSchema table_schema = 4; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema tableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema tableSchema_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_; /** * optional .hbase.pb.TableSchema table_schema = 4; @@ -5218,7 +5379,7 @@ public final class MasterProcedureProtos { */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { if (tableSchemaBuilder_ == null) { - return tableSchema_; + return tableSchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : tableSchema_; } else { return tableSchemaBuilder_.getMessage(); } @@ -5259,6 +5420,7 @@ public final class MasterProcedureProtos { public Builder mergeTableSchema(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema value) { if (tableSchemaBuilder_ == null) { if (((bitField0_ & 0x00000008) == 0x00000008) && + tableSchema_ != null && tableSchema_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) { tableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.newBuilder(tableSchema_).mergeFrom(value).buildPartial(); @@ -5277,7 +5439,7 @@ public final class MasterProcedureProtos { */ public Builder clearTableSchema() { if (tableSchemaBuilder_ == null) { - tableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + tableSchema_ = null; onChanged(); } else { tableSchemaBuilder_.clear(); @@ -5300,19 +5462,20 @@ public final class MasterProcedureProtos { if (tableSchemaBuilder_ != null) { return tableSchemaBuilder_.getMessageOrBuilder(); } else { - return tableSchema_; + return tableSchema_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : tableSchema_; } } /** * optional .hbase.pb.TableSchema table_schema = 4; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> getTableSchemaFieldBuilder() { if (tableSchemaBuilder_ == null) { - tableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>( - tableSchema_, + getTableSchema(), getParentForChildren(), isClean()); tableSchema_ = null; @@ -5320,7 +5483,6 @@ public final class MasterProcedureProtos { return tableSchemaBuilder_; } - // repeated .hbase.pb.RegionInfo region_info = 5; private java.util.List regionInfo_ = java.util.Collections.emptyList(); private void ensureRegionInfoIsMutable() { @@ -5330,7 +5492,7 @@ public final class MasterProcedureProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_; /** @@ -5462,7 +5624,8 @@ public final class MasterProcedureProtos { java.lang.Iterable values) { if (regionInfoBuilder_ == null) { ensureRegionInfoIsMutable(); - super.addAll(values, regionInfo_); + 
com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, regionInfo_); onChanged(); } else { regionInfoBuilder_.addAllMessages(values); @@ -5545,11 +5708,11 @@ public final class MasterProcedureProtos { getRegionInfoBuilderList() { return getRegionInfoFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> getRegionInfoFieldBuilder() { if (regionInfoBuilder_ == null) { - regionInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + regionInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( regionInfo_, ((bitField0_ & 0x00000010) == 0x00000010), @@ -5559,22 +5722,59 @@ public final class MasterProcedureProtos { } return regionInfoBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.TruncateTableStateData) } + // @@protoc_insertion_point(class_scope:hbase.pb.TruncateTableStateData) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.TruncateTableStateData DEFAULT_INSTANCE; static { - defaultInstance = new TruncateTableStateData(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.TruncateTableStateData(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.TruncateTableStateData getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public TruncateTableStateData parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TruncateTableStateData(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.TruncateTableStateData getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.TruncateTableStateData) } - public interface DeleteTableStateDataOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface DeleteTableStateDataOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.DeleteTableStateData) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.UserInformation user_info = 1; /** * required .hbase.pb.UserInformation user_info = 1; */ @@ -5588,7 +5788,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder(); - // required .hbase.pb.TableName table_name = 2; /** * required .hbase.pb.TableName table_name = 2; */ @@ -5602,7 +5801,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); - // repeated .hbase.pb.RegionInfo 
region_info = 3; /** * repeated .hbase.pb.RegionInfo region_info = 3; */ @@ -5630,36 +5828,28 @@ public final class MasterProcedureProtos { /** * Protobuf type {@code hbase.pb.DeleteTableStateData} */ - public static final class DeleteTableStateData extends - com.google.protobuf.GeneratedMessage - implements DeleteTableStateDataOrBuilder { + public static final class DeleteTableStateData extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.DeleteTableStateData) + DeleteTableStateDataOrBuilder { // Use DeleteTableStateData.newBuilder() to construct. - private DeleteTableStateData(com.google.protobuf.GeneratedMessage.Builder builder) { + private DeleteTableStateData(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private DeleteTableStateData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final DeleteTableStateData defaultInstance; - public static DeleteTableStateData getDefaultInstance() { - return defaultInstance; - } - - public DeleteTableStateData getDefaultInstanceForType() { - return defaultInstance; + private DeleteTableStateData() { + regionInfo_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private DeleteTableStateData( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -5709,7 +5899,8 @@ public final class MasterProcedureProtos { regionInfo_ = new java.util.ArrayList(); 
mutable_bitField0_ |= 0x00000004; } - regionInfo_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry)); + regionInfo_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry)); break; } } @@ -5718,7 +5909,7 @@ public final class MasterProcedureProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_); @@ -5732,30 +5923,14 @@ public final class MasterProcedureProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_DeleteTableStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_DeleteTableStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteTableStateData.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteTableStateData.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public DeleteTableStateData parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new DeleteTableStateData(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - 
private int bitField0_; - // required .hbase.pb.UserInformation user_info = 1; public static final int USER_INFO_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_; /** @@ -5768,16 +5943,15 @@ public final class MasterProcedureProtos { * required .hbase.pb.UserInformation user_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() { - return userInfo_; + return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } /** * required .hbase.pb.UserInformation user_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() { - return userInfo_; + return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } - // required .hbase.pb.TableName table_name = 2; public static final int TABLE_NAME_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** @@ -5790,16 +5964,15 @@ public final class MasterProcedureProtos { * required .hbase.pb.TableName table_name = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * required .hbase.pb.TableName table_name = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { - return tableName_; + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } - // repeated .hbase.pb.RegionInfo region_info = 3; public static final int REGION_INFO_FIELD_NUMBER = 3; private java.util.List regionInfo_; /** @@ -5835,15 +6008,11 @@ public final class MasterProcedureProtos { return regionInfo_.get(index); } - private void initFields() { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - regionInfo_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasUserInfo()) { memoizedIsInitialized = 0; @@ -5873,50 +6042,42 @@ public final class MasterProcedureProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, userInfo_); + output.writeMessage(1, getUserInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, tableName_); + output.writeMessage(2, getTableName()); } for (int i = 0; i < regionInfo_.size(); i++) { output.writeMessage(3, regionInfo_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, userInfo_); + .computeMessageSize(1, getUserInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += 
com.google.protobuf.CodedOutputStream - .computeMessageSize(2, tableName_); + .computeMessageSize(2, getTableName()); } for (int i = 0; i < regionInfo_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(3, regionInfo_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -5939,12 +6100,10 @@ public final class MasterProcedureProtos { } result = result && getRegionInfoList() .equals(other.getRegionInfoList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -5964,7 +6123,7 @@ public final class MasterProcedureProtos { hash = (37 * hash) + REGION_INFO_FIELD_NUMBER; hash = (53 * hash) + getRegionInfoList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -5992,46 +6151,57 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteTableStateData parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteTableStateData parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteTableStateData parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteTableStateData parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteTableStateData parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteTableStateData parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder 
newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteTableStateData prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -6039,14 +6209,15 @@ public final class MasterProcedureProtos { * Protobuf type {@code hbase.pb.DeleteTableStateData} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteTableStateDataOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.DeleteTableStateData) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteTableStateDataOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_DeleteTableStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_DeleteTableStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -6059,31 +6230,28 @@ public final class MasterProcedureProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent 
parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getUserInfoFieldBuilder(); getTableNameFieldBuilder(); getRegionInfoFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (userInfoBuilder_ == null) { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + userInfo_ = null; } else { userInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; } else { tableNameBuilder_.clear(); } @@ -6097,10 +6265,6 @@ public final class MasterProcedureProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_DeleteTableStateData_descriptor; @@ -6152,6 +6316,32 @@ public final class MasterProcedureProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) 
super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteTableStateData) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteTableStateData)other); @@ -6188,37 +6378,33 @@ public final class MasterProcedureProtos { regionInfo_ = other.regionInfo_; bitField0_ = (bitField0_ & ~0x00000004); regionInfoBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getRegionInfoFieldBuilder() : null; } else { regionInfoBuilder_.addAllMessages(other.regionInfo_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasUserInfo()) { - return false; } if (!hasTableName()) { - return false; } if (!getUserInfo().isInitialized()) { - return false; } if (!getTableName().isInitialized()) { - return false; } for (int i = 0; i < getRegionInfoCount(); i++) { if (!getRegionInfo(i).isInitialized()) { - return false; } } @@ -6234,7 +6420,7 @@ public final class MasterProcedureProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteTableStateData) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -6244,9 +6430,8 @@ public final class MasterProcedureProtos { } private int bitField0_; - // required .hbase.pb.UserInformation user_info = 1; - private 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> userInfoBuilder_; /** * required .hbase.pb.UserInformation user_info = 1; @@ -6259,7 +6444,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() { if (userInfoBuilder_ == null) { - return userInfo_; + return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } else { return userInfoBuilder_.getMessage(); } @@ -6300,6 +6485,7 @@ public final class MasterProcedureProtos { public Builder mergeUserInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation value) { if (userInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + userInfo_ != null && userInfo_ != org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance()) { userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.newBuilder(userInfo_).mergeFrom(value).buildPartial(); @@ -6318,7 +6504,7 @@ public final class MasterProcedureProtos { */ public Builder clearUserInfo() { if (userInfoBuilder_ == null) { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + userInfo_ = null; onChanged(); } else { userInfoBuilder_.clear(); @@ -6341,19 +6527,20 @@ 
public final class MasterProcedureProtos { if (userInfoBuilder_ != null) { return userInfoBuilder_.getMessageOrBuilder(); } else { - return userInfo_; + return userInfo_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } } /** * required .hbase.pb.UserInformation user_info = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> getUserInfoFieldBuilder() { if (userInfoBuilder_ == null) { - userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder>( - userInfo_, + getUserInfo(), getParentForChildren(), isClean()); userInfo_ = null; @@ -6361,9 +6548,8 @@ public final class MasterProcedureProtos { return userInfoBuilder_; } - // required .hbase.pb.TableName table_name = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> 
tableNameBuilder_; /** * required .hbase.pb.TableName table_name = 2; @@ -6376,7 +6562,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } @@ -6417,6 +6603,7 @@ public final class MasterProcedureProtos { public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); @@ -6435,7 +6622,7 @@ public final class MasterProcedureProtos { */ public Builder clearTableName() { if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); @@ -6458,19 +6645,20 @@ public final class MasterProcedureProtos { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { - return tableName_; + return tableName_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * required .hbase.pb.TableName table_name = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { - tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( - tableName_, + getTableName(), getParentForChildren(), isClean()); tableName_ = null; @@ -6478,7 +6666,6 @@ public final class MasterProcedureProtos { return tableNameBuilder_; } - // repeated .hbase.pb.RegionInfo region_info = 3; private java.util.List regionInfo_ = java.util.Collections.emptyList(); private void ensureRegionInfoIsMutable() { @@ -6488,7 +6675,7 @@ public final class MasterProcedureProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_; /** @@ -6620,7 +6807,8 @@ public final class MasterProcedureProtos { java.lang.Iterable values) { if (regionInfoBuilder_ == null) { ensureRegionInfoIsMutable(); - super.addAll(values, regionInfo_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, regionInfo_); 
onChanged(); } else { regionInfoBuilder_.addAllMessages(values); @@ -6703,11 +6891,11 @@ public final class MasterProcedureProtos { getRegionInfoBuilderList() { return getRegionInfoFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> getRegionInfoFieldBuilder() { if (regionInfoBuilder_ == null) { - regionInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + regionInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( regionInfo_, ((bitField0_ & 0x00000004) == 0x00000004), @@ -6717,22 +6905,59 @@ public final class MasterProcedureProtos { } return regionInfoBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.DeleteTableStateData) } + // @@protoc_insertion_point(class_scope:hbase.pb.DeleteTableStateData) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteTableStateData DEFAULT_INSTANCE; static { - defaultInstance = new DeleteTableStateData(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteTableStateData(); + } + + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteTableStateData getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public DeleteTableStateData parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DeleteTableStateData(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteTableStateData getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.DeleteTableStateData) } - public interface CreateNamespaceStateDataOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface CreateNamespaceStateDataOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.CreateNamespaceStateData) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.NamespaceDescriptor namespace_descriptor = 1; /** * required .hbase.pb.NamespaceDescriptor namespace_descriptor = 1; */ @@ -6749,36 +6974,27 @@ public final class MasterProcedureProtos { /** * Protobuf type {@code hbase.pb.CreateNamespaceStateData} */ - public static final class CreateNamespaceStateData extends - com.google.protobuf.GeneratedMessage - implements CreateNamespaceStateDataOrBuilder { + public static final class CreateNamespaceStateData extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.CreateNamespaceStateData) + CreateNamespaceStateDataOrBuilder { // Use CreateNamespaceStateData.newBuilder() to construct. 
- private CreateNamespaceStateData(com.google.protobuf.GeneratedMessage.Builder builder) { + private CreateNamespaceStateData(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private CreateNamespaceStateData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final CreateNamespaceStateData defaultInstance; - public static CreateNamespaceStateData getDefaultInstance() { - return defaultInstance; } - - public CreateNamespaceStateData getDefaultInstanceForType() { - return defaultInstance; + private CreateNamespaceStateData() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private CreateNamespaceStateData( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -6816,7 +7032,7 @@ public final class MasterProcedureProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -6827,30 +7043,14 @@ public final class MasterProcedureProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_CreateNamespaceStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { 
return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_CreateNamespaceStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateNamespaceStateData.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateNamespaceStateData.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CreateNamespaceStateData parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CreateNamespaceStateData(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.NamespaceDescriptor namespace_descriptor = 1; public static final int NAMESPACE_DESCRIPTOR_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor namespaceDescriptor_; /** @@ -6863,22 +7063,20 @@ public final class MasterProcedureProtos { * required .hbase.pb.NamespaceDescriptor namespace_descriptor = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor() { - return namespaceDescriptor_; + return namespaceDescriptor_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance() : namespaceDescriptor_; } /** * required .hbase.pb.NamespaceDescriptor namespace_descriptor = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder() { - return namespaceDescriptor_; + return namespaceDescriptor_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance() : namespaceDescriptor_; } - private void initFields() { - namespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasNamespaceDescriptor()) { memoizedIsInitialized = 0; @@ -6894,36 +7092,28 @@ public final class MasterProcedureProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, namespaceDescriptor_); + output.writeMessage(1, getNamespaceDescriptor()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, namespaceDescriptor_); + .computeMessageSize(1, getNamespaceDescriptor()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -6939,12 +7129,10 @@ public final class MasterProcedureProtos { result = result && getNamespaceDescriptor() 
.equals(other.getNamespaceDescriptor()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -6956,7 +7144,7 @@ public final class MasterProcedureProtos { hash = (37 * hash) + NAMESPACE_DESCRIPTOR_FIELD_NUMBER; hash = (53 * hash) + getNamespaceDescriptor().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -6984,46 +7172,57 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateNamespaceStateData parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateNamespaceStateData parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateNamespaceStateData parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateNamespaceStateData parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return 
PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateNamespaceStateData parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateNamespaceStateData parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateNamespaceStateData prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -7031,14 +7230,15 @@ public final class MasterProcedureProtos { * Protobuf type {@code hbase.pb.CreateNamespaceStateData} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateNamespaceStateDataOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.CreateNamespaceStateData) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateNamespaceStateDataOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_CreateNamespaceStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_CreateNamespaceStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -7051,23 +7251,20 @@ public final class MasterProcedureProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { 
getNamespaceDescriptorFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (namespaceDescriptorBuilder_ == null) { - namespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); + namespaceDescriptor_ = null; } else { namespaceDescriptorBuilder_.clear(); } @@ -7075,10 +7272,6 @@ public final class MasterProcedureProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_CreateNamespaceStateData_descriptor; @@ -7113,6 +7306,32 @@ public final class MasterProcedureProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateNamespaceStateData) { return 
mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateNamespaceStateData)other); @@ -7127,17 +7346,16 @@ public final class MasterProcedureProtos { if (other.hasNamespaceDescriptor()) { mergeNamespaceDescriptor(other.getNamespaceDescriptor()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasNamespaceDescriptor()) { - return false; } if (!getNamespaceDescriptor().isInitialized()) { - return false; } return true; @@ -7152,7 +7370,7 @@ public final class MasterProcedureProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateNamespaceStateData) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -7162,9 +7380,8 @@ public final class MasterProcedureProtos { } private int bitField0_; - // required .hbase.pb.NamespaceDescriptor namespace_descriptor = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor namespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor namespaceDescriptor_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> namespaceDescriptorBuilder_; /** * required .hbase.pb.NamespaceDescriptor namespace_descriptor = 1; 
@@ -7177,7 +7394,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor() { if (namespaceDescriptorBuilder_ == null) { - return namespaceDescriptor_; + return namespaceDescriptor_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance() : namespaceDescriptor_; } else { return namespaceDescriptorBuilder_.getMessage(); } @@ -7218,6 +7435,7 @@ public final class MasterProcedureProtos { public Builder mergeNamespaceDescriptor(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor value) { if (namespaceDescriptorBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + namespaceDescriptor_ != null && namespaceDescriptor_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance()) { namespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.newBuilder(namespaceDescriptor_).mergeFrom(value).buildPartial(); @@ -7236,7 +7454,7 @@ public final class MasterProcedureProtos { */ public Builder clearNamespaceDescriptor() { if (namespaceDescriptorBuilder_ == null) { - namespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); + namespaceDescriptor_ = null; onChanged(); } else { namespaceDescriptorBuilder_.clear(); @@ -7259,41 +7477,79 @@ public final class MasterProcedureProtos { if (namespaceDescriptorBuilder_ != null) { return namespaceDescriptorBuilder_.getMessageOrBuilder(); } else { - return namespaceDescriptor_; + return namespaceDescriptor_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance() : namespaceDescriptor_; } } /** * required .hbase.pb.NamespaceDescriptor namespace_descriptor = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> getNamespaceDescriptorFieldBuilder() { if (namespaceDescriptorBuilder_ == null) { - namespaceDescriptorBuilder_ = new com.google.protobuf.SingleFieldBuilder< + namespaceDescriptorBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder>( - namespaceDescriptor_, + getNamespaceDescriptor(), getParentForChildren(), isClean()); namespaceDescriptor_ = null; } return namespaceDescriptorBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.CreateNamespaceStateData) } + // @@protoc_insertion_point(class_scope:hbase.pb.CreateNamespaceStateData) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateNamespaceStateData DEFAULT_INSTANCE; static { - defaultInstance = new CreateNamespaceStateData(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateNamespaceStateData(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateNamespaceStateData getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public CreateNamespaceStateData parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CreateNamespaceStateData(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateNamespaceStateData getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.CreateNamespaceStateData) } - public interface ModifyNamespaceStateDataOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ModifyNamespaceStateDataOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ModifyNamespaceStateData) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.NamespaceDescriptor namespace_descriptor = 1; /** * required .hbase.pb.NamespaceDescriptor namespace_descriptor = 1; */ @@ -7307,7 +7563,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder(); - // optional .hbase.pb.NamespaceDescriptor unmodified_namespace_descriptor = 2; /** * optional .hbase.pb.NamespaceDescriptor unmodified_namespace_descriptor = 2; */ @@ -7324,36 +7579,27 @@ public final class MasterProcedureProtos { /** * Protobuf type 
{@code hbase.pb.ModifyNamespaceStateData} */ - public static final class ModifyNamespaceStateData extends - com.google.protobuf.GeneratedMessage - implements ModifyNamespaceStateDataOrBuilder { + public static final class ModifyNamespaceStateData extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ModifyNamespaceStateData) + ModifyNamespaceStateDataOrBuilder { // Use ModifyNamespaceStateData.newBuilder() to construct. - private ModifyNamespaceStateData(com.google.protobuf.GeneratedMessage.Builder builder) { + private ModifyNamespaceStateData(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ModifyNamespaceStateData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ModifyNamespaceStateData defaultInstance; - public static ModifyNamespaceStateData getDefaultInstance() { - return defaultInstance; } - - public ModifyNamespaceStateData getDefaultInstanceForType() { - return defaultInstance; + private ModifyNamespaceStateData() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ModifyNamespaceStateData( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -7404,7 +7650,7 @@ public final class MasterProcedureProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + 
e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -7415,30 +7661,14 @@ public final class MasterProcedureProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_ModifyNamespaceStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_ModifyNamespaceStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyNamespaceStateData.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyNamespaceStateData.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ModifyNamespaceStateData parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ModifyNamespaceStateData(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.NamespaceDescriptor namespace_descriptor = 1; public static final int NAMESPACE_DESCRIPTOR_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor namespaceDescriptor_; /** @@ -7451,16 +7681,15 @@ public final class MasterProcedureProtos { * required .hbase.pb.NamespaceDescriptor namespace_descriptor = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor() { - return namespaceDescriptor_; + return namespaceDescriptor_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance() : namespaceDescriptor_; } /** * required .hbase.pb.NamespaceDescriptor namespace_descriptor = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder() { - return namespaceDescriptor_; + return namespaceDescriptor_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance() : namespaceDescriptor_; } - // optional .hbase.pb.NamespaceDescriptor unmodified_namespace_descriptor = 2; public static final int UNMODIFIED_NAMESPACE_DESCRIPTOR_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor unmodifiedNamespaceDescriptor_; /** @@ -7473,23 +7702,20 @@ public final class MasterProcedureProtos { * optional .hbase.pb.NamespaceDescriptor unmodified_namespace_descriptor = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor getUnmodifiedNamespaceDescriptor() { - return unmodifiedNamespaceDescriptor_; + return unmodifiedNamespaceDescriptor_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance() : unmodifiedNamespaceDescriptor_; } /** * optional .hbase.pb.NamespaceDescriptor unmodified_namespace_descriptor = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getUnmodifiedNamespaceDescriptorOrBuilder() { - return unmodifiedNamespaceDescriptor_; + return unmodifiedNamespaceDescriptor_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance() : unmodifiedNamespaceDescriptor_; } - private void initFields() { - namespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); - unmodifiedNamespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasNamespaceDescriptor()) { memoizedIsInitialized = 0; @@ -7511,43 +7737,35 @@ public final class MasterProcedureProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, namespaceDescriptor_); + output.writeMessage(1, getNamespaceDescriptor()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, unmodifiedNamespaceDescriptor_); + output.writeMessage(2, getUnmodifiedNamespaceDescriptor()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, namespaceDescriptor_); + .computeMessageSize(1, getNamespaceDescriptor()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, unmodifiedNamespaceDescriptor_); + .computeMessageSize(2, getUnmodifiedNamespaceDescriptor()); } - size += getUnknownFields().getSerializedSize(); - 
memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -7568,12 +7786,10 @@ public final class MasterProcedureProtos { result = result && getUnmodifiedNamespaceDescriptor() .equals(other.getUnmodifiedNamespaceDescriptor()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -7589,7 +7805,7 @@ public final class MasterProcedureProtos { hash = (37 * hash) + UNMODIFIED_NAMESPACE_DESCRIPTOR_FIELD_NUMBER; hash = (53 * hash) + getUnmodifiedNamespaceDescriptor().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -7617,46 +7833,57 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyNamespaceStateData parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyNamespaceStateData parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyNamespaceStateData parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyNamespaceStateData parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyNamespaceStateData parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyNamespaceStateData parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyNamespaceStateData prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == 
DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -7664,14 +7891,15 @@ public final class MasterProcedureProtos { * Protobuf type {@code hbase.pb.ModifyNamespaceStateData} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyNamespaceStateDataOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ModifyNamespaceStateData) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyNamespaceStateDataOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_ModifyNamespaceStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_ModifyNamespaceStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -7684,30 +7912,27 @@ public final class MasterProcedureProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { 
getNamespaceDescriptorFieldBuilder(); getUnmodifiedNamespaceDescriptorFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (namespaceDescriptorBuilder_ == null) { - namespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); + namespaceDescriptor_ = null; } else { namespaceDescriptorBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (unmodifiedNamespaceDescriptorBuilder_ == null) { - unmodifiedNamespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); + unmodifiedNamespaceDescriptor_ = null; } else { unmodifiedNamespaceDescriptorBuilder_.clear(); } @@ -7715,10 +7940,6 @@ public final class MasterProcedureProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_ModifyNamespaceStateData_descriptor; @@ -7761,6 +7982,32 @@ public final class MasterProcedureProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + 
com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyNamespaceStateData) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyNamespaceStateData)other); @@ -7778,22 +8025,20 @@ public final class MasterProcedureProtos { if (other.hasUnmodifiedNamespaceDescriptor()) { mergeUnmodifiedNamespaceDescriptor(other.getUnmodifiedNamespaceDescriptor()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasNamespaceDescriptor()) { - return false; } if (!getNamespaceDescriptor().isInitialized()) { - return false; } if (hasUnmodifiedNamespaceDescriptor()) { if (!getUnmodifiedNamespaceDescriptor().isInitialized()) { - return false; } } @@ -7809,7 +8054,7 @@ public final class MasterProcedureProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyNamespaceStateData) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -7819,9 +8064,8 @@ public final class MasterProcedureProtos { } private int bitField0_; - // required .hbase.pb.NamespaceDescriptor namespace_descriptor = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor namespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor namespaceDescriptor_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> namespaceDescriptorBuilder_; /** * required .hbase.pb.NamespaceDescriptor namespace_descriptor = 1; @@ -7834,7 +8078,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor() { if (namespaceDescriptorBuilder_ == null) { - return namespaceDescriptor_; + return namespaceDescriptor_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance() : namespaceDescriptor_; } else { return namespaceDescriptorBuilder_.getMessage(); } @@ -7875,6 +8119,7 @@ public final class MasterProcedureProtos { public Builder mergeNamespaceDescriptor(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor value) { if (namespaceDescriptorBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + namespaceDescriptor_ != null && namespaceDescriptor_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance()) { namespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.newBuilder(namespaceDescriptor_).mergeFrom(value).buildPartial(); @@ -7893,7 +8138,7 @@ public final class MasterProcedureProtos { */ public Builder clearNamespaceDescriptor() { if (namespaceDescriptorBuilder_ == null) { - namespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); + namespaceDescriptor_ = null; onChanged(); } else { 
namespaceDescriptorBuilder_.clear(); @@ -7916,19 +8161,20 @@ public final class MasterProcedureProtos { if (namespaceDescriptorBuilder_ != null) { return namespaceDescriptorBuilder_.getMessageOrBuilder(); } else { - return namespaceDescriptor_; + return namespaceDescriptor_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance() : namespaceDescriptor_; } } /** * required .hbase.pb.NamespaceDescriptor namespace_descriptor = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> getNamespaceDescriptorFieldBuilder() { if (namespaceDescriptorBuilder_ == null) { - namespaceDescriptorBuilder_ = new com.google.protobuf.SingleFieldBuilder< + namespaceDescriptorBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder>( - namespaceDescriptor_, + getNamespaceDescriptor(), getParentForChildren(), isClean()); namespaceDescriptor_ = null; @@ -7936,9 +8182,8 @@ public final class MasterProcedureProtos { return namespaceDescriptorBuilder_; } - // optional .hbase.pb.NamespaceDescriptor unmodified_namespace_descriptor = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor unmodifiedNamespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor unmodifiedNamespaceDescriptor_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> unmodifiedNamespaceDescriptorBuilder_; /** * optional .hbase.pb.NamespaceDescriptor unmodified_namespace_descriptor = 2; @@ -7951,7 +8196,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor getUnmodifiedNamespaceDescriptor() { if (unmodifiedNamespaceDescriptorBuilder_ == null) { - return unmodifiedNamespaceDescriptor_; + return unmodifiedNamespaceDescriptor_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance() : unmodifiedNamespaceDescriptor_; } else { return unmodifiedNamespaceDescriptorBuilder_.getMessage(); } @@ -7992,6 +8237,7 @@ public final class MasterProcedureProtos { public Builder mergeUnmodifiedNamespaceDescriptor(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor value) { if (unmodifiedNamespaceDescriptorBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + unmodifiedNamespaceDescriptor_ != null && unmodifiedNamespaceDescriptor_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance()) { unmodifiedNamespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.newBuilder(unmodifiedNamespaceDescriptor_).mergeFrom(value).buildPartial(); @@ -8010,7 +8256,7 @@ public final class MasterProcedureProtos { */ public Builder clearUnmodifiedNamespaceDescriptor() { if (unmodifiedNamespaceDescriptorBuilder_ == null) { - unmodifiedNamespaceDescriptor_ = 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); + unmodifiedNamespaceDescriptor_ = null; onChanged(); } else { unmodifiedNamespaceDescriptorBuilder_.clear(); @@ -8033,41 +8279,79 @@ public final class MasterProcedureProtos { if (unmodifiedNamespaceDescriptorBuilder_ != null) { return unmodifiedNamespaceDescriptorBuilder_.getMessageOrBuilder(); } else { - return unmodifiedNamespaceDescriptor_; + return unmodifiedNamespaceDescriptor_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance() : unmodifiedNamespaceDescriptor_; } } /** * optional .hbase.pb.NamespaceDescriptor unmodified_namespace_descriptor = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> getUnmodifiedNamespaceDescriptorFieldBuilder() { if (unmodifiedNamespaceDescriptorBuilder_ == null) { - unmodifiedNamespaceDescriptorBuilder_ = new com.google.protobuf.SingleFieldBuilder< + unmodifiedNamespaceDescriptorBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder>( - unmodifiedNamespaceDescriptor_, + getUnmodifiedNamespaceDescriptor(), getParentForChildren(), isClean()); unmodifiedNamespaceDescriptor_ = null; } return unmodifiedNamespaceDescriptorBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + 
public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ModifyNamespaceStateData) } + // @@protoc_insertion_point(class_scope:hbase.pb.ModifyNamespaceStateData) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyNamespaceStateData DEFAULT_INSTANCE; static { - defaultInstance = new ModifyNamespaceStateData(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyNamespaceStateData(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyNamespaceStateData getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ModifyNamespaceStateData parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ModifyNamespaceStateData(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyNamespaceStateData getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ModifyNamespaceStateData) } - public interface DeleteNamespaceStateDataOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface DeleteNamespaceStateDataOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.DeleteNamespaceStateData) + com.google.protobuf.MessageOrBuilder { - // required 
string namespace_name = 1; /** * required string namespace_name = 1; */ @@ -8082,7 +8366,6 @@ public final class MasterProcedureProtos { com.google.protobuf.ByteString getNamespaceNameBytes(); - // optional .hbase.pb.NamespaceDescriptor namespace_descriptor = 2; /** * optional .hbase.pb.NamespaceDescriptor namespace_descriptor = 2; */ @@ -8099,36 +8382,28 @@ public final class MasterProcedureProtos { /** * Protobuf type {@code hbase.pb.DeleteNamespaceStateData} */ - public static final class DeleteNamespaceStateData extends - com.google.protobuf.GeneratedMessage - implements DeleteNamespaceStateDataOrBuilder { + public static final class DeleteNamespaceStateData extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.DeleteNamespaceStateData) + DeleteNamespaceStateDataOrBuilder { // Use DeleteNamespaceStateData.newBuilder() to construct. - private DeleteNamespaceStateData(com.google.protobuf.GeneratedMessage.Builder builder) { + private DeleteNamespaceStateData(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private DeleteNamespaceStateData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final DeleteNamespaceStateData defaultInstance; - public static DeleteNamespaceStateData getDefaultInstance() { - return defaultInstance; } - - public DeleteNamespaceStateData getDefaultInstanceForType() { - return defaultInstance; + private DeleteNamespaceStateData() { + namespaceName_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private DeleteNamespaceStateData( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -8148,8 +8423,9 @@ public final class MasterProcedureProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - namespaceName_ = input.readBytes(); + namespaceName_ = bs; break; } case 18: { @@ -8171,7 +8447,7 @@ public final class MasterProcedureProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -8182,32 +8458,16 @@ public final class MasterProcedureProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_DeleteNamespaceStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_DeleteNamespaceStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteNamespaceStateData.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteNamespaceStateData.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public DeleteNamespaceStateData parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new DeleteNamespaceStateData(input, extensionRegistry); - } - 
}; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string namespace_name = 1; public static final int NAMESPACE_NAME_FIELD_NUMBER = 1; - private java.lang.Object namespaceName_; + private volatile java.lang.Object namespaceName_; /** * required string namespace_name = 1; */ @@ -8248,7 +8508,6 @@ public final class MasterProcedureProtos { } } - // optional .hbase.pb.NamespaceDescriptor namespace_descriptor = 2; public static final int NAMESPACE_DESCRIPTOR_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor namespaceDescriptor_; /** @@ -8261,23 +8520,20 @@ public final class MasterProcedureProtos { * optional .hbase.pb.NamespaceDescriptor namespace_descriptor = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor() { - return namespaceDescriptor_; + return namespaceDescriptor_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance() : namespaceDescriptor_; } /** * optional .hbase.pb.NamespaceDescriptor namespace_descriptor = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder() { - return namespaceDescriptor_; + return namespaceDescriptor_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance() : namespaceDescriptor_; } - private void initFields() { - namespaceName_ = ""; - namespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasNamespaceName()) { memoizedIsInitialized = 0; @@ -8295,43 +8551,34 @@ public final class MasterProcedureProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getNamespaceNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, namespaceName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, namespaceDescriptor_); + output.writeMessage(2, getNamespaceDescriptor()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getNamespaceNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, namespaceName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, namespaceDescriptor_); + .computeMessageSize(2, getNamespaceDescriptor()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } 
private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -8352,12 +8599,10 @@ public final class MasterProcedureProtos { result = result && getNamespaceDescriptor() .equals(other.getNamespaceDescriptor()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -8373,7 +8618,7 @@ public final class MasterProcedureProtos { hash = (37 * hash) + NAMESPACE_DESCRIPTOR_FIELD_NUMBER; hash = (53 * hash) + getNamespaceDescriptor().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -8401,46 +8646,57 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteNamespaceStateData parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteNamespaceStateData parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteNamespaceStateData parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - 
return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteNamespaceStateData parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteNamespaceStateData parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteNamespaceStateData parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteNamespaceStateData prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -8448,14 +8704,15 @@ public final class MasterProcedureProtos { * Protobuf type {@code hbase.pb.DeleteNamespaceStateData} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteNamespaceStateDataOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.DeleteNamespaceStateData) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteNamespaceStateDataOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_DeleteNamespaceStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_DeleteNamespaceStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -8468,25 +8725,22 @@ public final class MasterProcedureProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { 
getNamespaceDescriptorFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); namespaceName_ = ""; bitField0_ = (bitField0_ & ~0x00000001); if (namespaceDescriptorBuilder_ == null) { - namespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); + namespaceDescriptor_ = null; } else { namespaceDescriptorBuilder_.clear(); } @@ -8494,10 +8748,6 @@ public final class MasterProcedureProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_DeleteNamespaceStateData_descriptor; @@ -8536,7 +8786,33 @@ public final class MasterProcedureProtos { return result; } - public Builder mergeFrom(com.google.protobuf.Message other) { + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteNamespaceStateData) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteNamespaceStateData)other); } else { @@ -8555,18 +8831,17 @@ public final class MasterProcedureProtos { if (other.hasNamespaceDescriptor()) { mergeNamespaceDescriptor(other.getNamespaceDescriptor()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasNamespaceName()) { - return false; } if (hasNamespaceDescriptor()) { if (!getNamespaceDescriptor().isInitialized()) { - return false; } } @@ -8582,7 +8857,7 @@ public final class MasterProcedureProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteNamespaceStateData) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -8592,7 +8867,6 @@ public final class MasterProcedureProtos { } private int bitField0_; - // required string namespace_name = 1; private java.lang.Object namespaceName_ = ""; /** * required string namespace_name = 1; @@ -8606,9 +8880,12 @@ public final class MasterProcedureProtos { public java.lang.String getNamespaceName() { java.lang.Object ref = namespaceName_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - namespaceName_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + namespaceName_ = s; + } return s; } else { return (java.lang.String) ref; @@ -8666,9 +8943,8 @@ public final class MasterProcedureProtos { return this; } - // optional 
.hbase.pb.NamespaceDescriptor namespace_descriptor = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor namespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor namespaceDescriptor_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> namespaceDescriptorBuilder_; /** * optional .hbase.pb.NamespaceDescriptor namespace_descriptor = 2; @@ -8681,7 +8957,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor() { if (namespaceDescriptorBuilder_ == null) { - return namespaceDescriptor_; + return namespaceDescriptor_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance() : namespaceDescriptor_; } else { return namespaceDescriptorBuilder_.getMessage(); } @@ -8722,6 +8998,7 @@ public final class MasterProcedureProtos { public Builder mergeNamespaceDescriptor(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor value) { if (namespaceDescriptorBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + namespaceDescriptor_ != null && namespaceDescriptor_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance()) { namespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.newBuilder(namespaceDescriptor_).mergeFrom(value).buildPartial(); @@ -8740,7 +9017,7 @@ public final class MasterProcedureProtos { */ public Builder clearNamespaceDescriptor() { if (namespaceDescriptorBuilder_ == null) { - namespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); + namespaceDescriptor_ = null; onChanged(); } else { namespaceDescriptorBuilder_.clear(); @@ -8763,41 +9040,79 @@ public final class MasterProcedureProtos { if (namespaceDescriptorBuilder_ != null) { return namespaceDescriptorBuilder_.getMessageOrBuilder(); } else { - return namespaceDescriptor_; + return namespaceDescriptor_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance() : namespaceDescriptor_; } } /** * optional .hbase.pb.NamespaceDescriptor namespace_descriptor = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> getNamespaceDescriptorFieldBuilder() { if (namespaceDescriptorBuilder_ == null) { - namespaceDescriptorBuilder_ = new com.google.protobuf.SingleFieldBuilder< + namespaceDescriptorBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder>( - namespaceDescriptor_, + getNamespaceDescriptor(), getParentForChildren(), isClean()); namespaceDescriptor_ = null; } return namespaceDescriptorBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.DeleteNamespaceStateData) } + // @@protoc_insertion_point(class_scope:hbase.pb.DeleteNamespaceStateData) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteNamespaceStateData DEFAULT_INSTANCE; static { - defaultInstance = new DeleteNamespaceStateData(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteNamespaceStateData(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteNamespaceStateData getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public DeleteNamespaceStateData parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DeleteNamespaceStateData(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteNamespaceStateData getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.DeleteNamespaceStateData) } - public interface AddColumnFamilyStateDataOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface AddColumnFamilyStateDataOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.AddColumnFamilyStateData) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.UserInformation user_info = 1; /** * required .hbase.pb.UserInformation user_info = 1; */ @@ -8811,7 +9126,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder(); - // required .hbase.pb.TableName table_name = 2; /** * required .hbase.pb.TableName table_name = 2; */ @@ -8825,7 +9139,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); - // required 
.hbase.pb.ColumnFamilySchema columnfamily_schema = 3; /** * required .hbase.pb.ColumnFamilySchema columnfamily_schema = 3; */ @@ -8839,7 +9152,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnfamilySchemaOrBuilder(); - // optional .hbase.pb.TableSchema unmodified_table_schema = 4; /** * optional .hbase.pb.TableSchema unmodified_table_schema = 4; */ @@ -8856,36 +9168,27 @@ public final class MasterProcedureProtos { /** * Protobuf type {@code hbase.pb.AddColumnFamilyStateData} */ - public static final class AddColumnFamilyStateData extends - com.google.protobuf.GeneratedMessage - implements AddColumnFamilyStateDataOrBuilder { + public static final class AddColumnFamilyStateData extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.AddColumnFamilyStateData) + AddColumnFamilyStateDataOrBuilder { // Use AddColumnFamilyStateData.newBuilder() to construct. 
- private AddColumnFamilyStateData(com.google.protobuf.GeneratedMessage.Builder builder) { + private AddColumnFamilyStateData(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private AddColumnFamilyStateData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final AddColumnFamilyStateData defaultInstance; - public static AddColumnFamilyStateData getDefaultInstance() { - return defaultInstance; + private AddColumnFamilyStateData() { } - public AddColumnFamilyStateData getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private AddColumnFamilyStateData( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -8962,7 +9265,7 @@ public final class MasterProcedureProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -8973,30 +9276,14 @@ public final class MasterProcedureProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_AddColumnFamilyStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { 
return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_AddColumnFamilyStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public AddColumnFamilyStateData parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new AddColumnFamilyStateData(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.UserInformation user_info = 1; public static final int USER_INFO_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_; /** @@ -9009,16 +9296,15 @@ public final class MasterProcedureProtos { * required .hbase.pb.UserInformation user_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() { - return userInfo_; + return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } /** * required .hbase.pb.UserInformation user_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() { - return userInfo_; + return userInfo_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } - // required .hbase.pb.TableName table_name = 2; public static final int TABLE_NAME_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** @@ -9031,16 +9317,15 @@ public final class MasterProcedureProtos { * required .hbase.pb.TableName table_name = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * required .hbase.pb.TableName table_name = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } - // required .hbase.pb.ColumnFamilySchema columnfamily_schema = 3; public static final int COLUMNFAMILY_SCHEMA_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema columnfamilySchema_; /** @@ -9053,16 +9338,15 @@ public final class MasterProcedureProtos { * required .hbase.pb.ColumnFamilySchema columnfamily_schema = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnfamilySchema() { - return columnfamilySchema_; + return columnfamilySchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance() : columnfamilySchema_; } /** * required .hbase.pb.ColumnFamilySchema columnfamily_schema = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnfamilySchemaOrBuilder() { - return columnfamilySchema_; + return columnfamilySchema_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance() : columnfamilySchema_; } - // optional .hbase.pb.TableSchema unmodified_table_schema = 4; public static final int UNMODIFIED_TABLE_SCHEMA_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema unmodifiedTableSchema_; /** @@ -9075,25 +9359,20 @@ public final class MasterProcedureProtos { * optional .hbase.pb.TableSchema unmodified_table_schema = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getUnmodifiedTableSchema() { - return unmodifiedTableSchema_; + return unmodifiedTableSchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : unmodifiedTableSchema_; } /** * optional .hbase.pb.TableSchema unmodified_table_schema = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getUnmodifiedTableSchemaOrBuilder() { - return unmodifiedTableSchema_; + return unmodifiedTableSchema_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : unmodifiedTableSchema_; } - private void initFields() { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - columnfamilySchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); - unmodifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasUserInfo()) { memoizedIsInitialized = 0; @@ -9131,57 +9410,49 @@ public final class MasterProcedureProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, userInfo_); + output.writeMessage(1, getUserInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, tableName_); + output.writeMessage(2, getTableName()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeMessage(3, columnfamilySchema_); + output.writeMessage(3, getColumnfamilySchema()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeMessage(4, unmodifiedTableSchema_); + output.writeMessage(4, getUnmodifiedTableSchema()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += 
com.google.protobuf.CodedOutputStream - .computeMessageSize(1, userInfo_); + .computeMessageSize(1, getUserInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, tableName_); + .computeMessageSize(2, getTableName()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, columnfamilySchema_); + .computeMessageSize(3, getColumnfamilySchema()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(4, unmodifiedTableSchema_); + .computeMessageSize(4, getUnmodifiedTableSchema()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -9212,12 +9483,10 @@ public final class MasterProcedureProtos { result = result && getUnmodifiedTableSchema() .equals(other.getUnmodifiedTableSchema()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -9241,7 +9510,7 @@ public final class MasterProcedureProtos { hash = (37 * hash) + UNMODIFIED_TABLE_SCHEMA_FIELD_NUMBER; hash = (53 * hash) + getUnmodifiedTableSchema().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -9269,46 +9538,57 @@ public final class MasterProcedureProtos { } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -9316,14 +9596,15 @@ public final class MasterProcedureProtos { * Protobuf type {@code hbase.pb.AddColumnFamilyStateData} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateDataOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.AddColumnFamilyStateData) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateDataOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_AddColumnFamilyStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_AddColumnFamilyStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -9336,44 +9617,41 @@ public final class MasterProcedureProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getUserInfoFieldBuilder(); getTableNameFieldBuilder(); getColumnfamilySchemaFieldBuilder(); getUnmodifiedTableSchemaFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (userInfoBuilder_ == null) { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + userInfo_ = null; } else { userInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); if (columnfamilySchemaBuilder_ == null) { - columnfamilySchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); + columnfamilySchema_ = null; } else { columnfamilySchemaBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); if (unmodifiedTableSchemaBuilder_ == null) { - unmodifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + unmodifiedTableSchema_ = null; } else { unmodifiedTableSchemaBuilder_.clear(); } @@ 
-9381,10 +9659,6 @@ public final class MasterProcedureProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_AddColumnFamilyStateData_descriptor; @@ -9443,6 +9717,32 @@ public final class MasterProcedureProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData)other); @@ -9466,38 +9766,32 @@ public final class MasterProcedureProtos { if (other.hasUnmodifiedTableSchema()) { mergeUnmodifiedTableSchema(other.getUnmodifiedTableSchema()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasUserInfo()) { - return false; } if 
(!hasTableName()) { - return false; } if (!hasColumnfamilySchema()) { - return false; } if (!getUserInfo().isInitialized()) { - return false; } if (!getTableName().isInitialized()) { - return false; } if (!getColumnfamilySchema().isInitialized()) { - return false; } if (hasUnmodifiedTableSchema()) { if (!getUnmodifiedTableSchema().isInitialized()) { - return false; } } @@ -9513,7 +9807,7 @@ public final class MasterProcedureProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -9523,9 +9817,8 @@ public final class MasterProcedureProtos { } private int bitField0_; - // required .hbase.pb.UserInformation user_info = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> userInfoBuilder_; /** * required .hbase.pb.UserInformation user_info = 1; @@ -9538,7 +9831,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() { if (userInfoBuilder_ == null) { - return userInfo_; + return userInfo_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } else { return userInfoBuilder_.getMessage(); } @@ -9579,6 +9872,7 @@ public final class MasterProcedureProtos { public Builder mergeUserInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation value) { if (userInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + userInfo_ != null && userInfo_ != org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance()) { userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.newBuilder(userInfo_).mergeFrom(value).buildPartial(); @@ -9597,7 +9891,7 @@ public final class MasterProcedureProtos { */ public Builder clearUserInfo() { if (userInfoBuilder_ == null) { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + userInfo_ = null; onChanged(); } else { userInfoBuilder_.clear(); @@ -9620,19 +9914,20 @@ public final class MasterProcedureProtos { if (userInfoBuilder_ != null) { return userInfoBuilder_.getMessageOrBuilder(); } else { - return userInfo_; + return userInfo_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } } /** * required .hbase.pb.UserInformation user_info = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> getUserInfoFieldBuilder() { if (userInfoBuilder_ == null) { - userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder>( - userInfo_, + getUserInfo(), getParentForChildren(), isClean()); userInfo_ = null; @@ -9640,9 +9935,8 @@ public final class MasterProcedureProtos { return userInfoBuilder_; } - // required .hbase.pb.TableName table_name = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * required .hbase.pb.TableName table_name = 2; @@ -9655,7 +9949,7 @@ public final class MasterProcedureProtos { */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } @@ -9696,6 +9990,7 @@ public final class MasterProcedureProtos { public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); @@ -9714,7 +10009,7 @@ public final class MasterProcedureProtos { */ public Builder clearTableName() { if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); @@ -9737,19 +10032,20 @@ public final class MasterProcedureProtos { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { - return tableName_; + return tableName_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * required .hbase.pb.TableName table_name = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { - tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( - tableName_, + getTableName(), getParentForChildren(), isClean()); tableName_ = null; @@ -9757,9 +10053,8 @@ public final class MasterProcedureProtos { return tableNameBuilder_; } - // required .hbase.pb.ColumnFamilySchema columnfamily_schema = 3; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema columnfamilySchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema columnfamilySchema_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> columnfamilySchemaBuilder_; /** * required .hbase.pb.ColumnFamilySchema columnfamily_schema = 3; @@ -9772,7 
+10067,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnfamilySchema() { if (columnfamilySchemaBuilder_ == null) { - return columnfamilySchema_; + return columnfamilySchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance() : columnfamilySchema_; } else { return columnfamilySchemaBuilder_.getMessage(); } @@ -9813,6 +10108,7 @@ public final class MasterProcedureProtos { public Builder mergeColumnfamilySchema(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { if (columnfamilySchemaBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004) && + columnfamilySchema_ != null && columnfamilySchema_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()) { columnfamilySchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(columnfamilySchema_).mergeFrom(value).buildPartial(); @@ -9831,7 +10127,7 @@ public final class MasterProcedureProtos { */ public Builder clearColumnfamilySchema() { if (columnfamilySchemaBuilder_ == null) { - columnfamilySchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); + columnfamilySchema_ = null; onChanged(); } else { columnfamilySchemaBuilder_.clear(); @@ -9854,19 +10150,20 @@ public final class MasterProcedureProtos { if (columnfamilySchemaBuilder_ != null) { return columnfamilySchemaBuilder_.getMessageOrBuilder(); } else { - return columnfamilySchema_; + return columnfamilySchema_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance() : columnfamilySchema_; } } /** * required .hbase.pb.ColumnFamilySchema columnfamily_schema = 3; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> getColumnfamilySchemaFieldBuilder() { if (columnfamilySchemaBuilder_ == null) { - columnfamilySchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder< + columnfamilySchemaBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>( - columnfamilySchema_, + getColumnfamilySchema(), getParentForChildren(), isClean()); columnfamilySchema_ = null; @@ -9874,9 +10171,8 @@ public final class MasterProcedureProtos { return columnfamilySchemaBuilder_; } - // optional .hbase.pb.TableSchema unmodified_table_schema = 4; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema unmodifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema unmodifiedTableSchema_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> unmodifiedTableSchemaBuilder_; /** * optional .hbase.pb.TableSchema unmodified_table_schema = 4; @@ -9889,7 +10185,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getUnmodifiedTableSchema() { if (unmodifiedTableSchemaBuilder_ == null) { - return unmodifiedTableSchema_; + return unmodifiedTableSchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : unmodifiedTableSchema_; } else { return unmodifiedTableSchemaBuilder_.getMessage(); } @@ -9930,6 +10226,7 @@ public final class MasterProcedureProtos { public Builder mergeUnmodifiedTableSchema(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema value) { if (unmodifiedTableSchemaBuilder_ == null) { if (((bitField0_ & 0x00000008) == 0x00000008) && + unmodifiedTableSchema_ != null && unmodifiedTableSchema_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) { unmodifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.newBuilder(unmodifiedTableSchema_).mergeFrom(value).buildPartial(); @@ -9948,7 +10245,7 @@ public final class MasterProcedureProtos { */ public Builder clearUnmodifiedTableSchema() { if (unmodifiedTableSchemaBuilder_ == null) { - unmodifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + unmodifiedTableSchema_ = null; onChanged(); } else { unmodifiedTableSchemaBuilder_.clear(); @@ -9971,41 +10268,79 @@ public final class MasterProcedureProtos { if (unmodifiedTableSchemaBuilder_ != null) { return unmodifiedTableSchemaBuilder_.getMessageOrBuilder(); } else { - return unmodifiedTableSchema_; + return unmodifiedTableSchema_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : unmodifiedTableSchema_; } } /** * optional .hbase.pb.TableSchema unmodified_table_schema = 4; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> getUnmodifiedTableSchemaFieldBuilder() { if (unmodifiedTableSchemaBuilder_ == null) { - unmodifiedTableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder< + unmodifiedTableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>( - unmodifiedTableSchema_, + getUnmodifiedTableSchema(), getParentForChildren(), isClean()); unmodifiedTableSchema_ = null; } return unmodifiedTableSchemaBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.AddColumnFamilyStateData) } + // @@protoc_insertion_point(class_scope:hbase.pb.AddColumnFamilyStateData) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData DEFAULT_INSTANCE; static { - defaultInstance = new AddColumnFamilyStateData(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public AddColumnFamilyStateData parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new AddColumnFamilyStateData(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.AddColumnFamilyStateData) } - public interface ModifyColumnFamilyStateDataOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ModifyColumnFamilyStateDataOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ModifyColumnFamilyStateData) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.UserInformation user_info = 1; /** * required .hbase.pb.UserInformation user_info = 1; */ @@ -10019,7 +10354,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder(); - // required .hbase.pb.TableName table_name = 2; /** * required .hbase.pb.TableName table_name = 2; */ @@ -10033,7 +10367,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); - // 
required .hbase.pb.ColumnFamilySchema columnfamily_schema = 3; /** * required .hbase.pb.ColumnFamilySchema columnfamily_schema = 3; */ @@ -10047,7 +10380,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnfamilySchemaOrBuilder(); - // optional .hbase.pb.TableSchema unmodified_table_schema = 4; /** * optional .hbase.pb.TableSchema unmodified_table_schema = 4; */ @@ -10064,36 +10396,27 @@ public final class MasterProcedureProtos { /** * Protobuf type {@code hbase.pb.ModifyColumnFamilyStateData} */ - public static final class ModifyColumnFamilyStateData extends - com.google.protobuf.GeneratedMessage - implements ModifyColumnFamilyStateDataOrBuilder { + public static final class ModifyColumnFamilyStateData extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ModifyColumnFamilyStateData) + ModifyColumnFamilyStateDataOrBuilder { // Use ModifyColumnFamilyStateData.newBuilder() to construct. 
- private ModifyColumnFamilyStateData(com.google.protobuf.GeneratedMessage.Builder builder) { + private ModifyColumnFamilyStateData(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ModifyColumnFamilyStateData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ModifyColumnFamilyStateData defaultInstance; - public static ModifyColumnFamilyStateData getDefaultInstance() { - return defaultInstance; } - - public ModifyColumnFamilyStateData getDefaultInstanceForType() { - return defaultInstance; + private ModifyColumnFamilyStateData() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ModifyColumnFamilyStateData( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -10170,7 +10493,7 @@ public final class MasterProcedureProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -10181,30 +10504,14 @@ public final class MasterProcedureProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_ModifyColumnFamilyStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_ModifyColumnFamilyStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ModifyColumnFamilyStateData parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ModifyColumnFamilyStateData(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.UserInformation user_info = 1; public static final int USER_INFO_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_; /** @@ -10217,16 +10524,15 @@ public final class MasterProcedureProtos { * required .hbase.pb.UserInformation user_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() { - return userInfo_; + return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } /** * required .hbase.pb.UserInformation user_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() { - return userInfo_; + return userInfo_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } - // required .hbase.pb.TableName table_name = 2; public static final int TABLE_NAME_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** @@ -10239,16 +10545,15 @@ public final class MasterProcedureProtos { * required .hbase.pb.TableName table_name = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * required .hbase.pb.TableName table_name = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } - // required .hbase.pb.ColumnFamilySchema columnfamily_schema = 3; public static final int COLUMNFAMILY_SCHEMA_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema columnfamilySchema_; /** @@ -10261,16 +10566,15 @@ public final class MasterProcedureProtos { * required .hbase.pb.ColumnFamilySchema columnfamily_schema = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnfamilySchema() { - return columnfamilySchema_; + return columnfamilySchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance() : columnfamilySchema_; } /** * required .hbase.pb.ColumnFamilySchema columnfamily_schema = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnfamilySchemaOrBuilder() { - return columnfamilySchema_; + return columnfamilySchema_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance() : columnfamilySchema_; } - // optional .hbase.pb.TableSchema unmodified_table_schema = 4; public static final int UNMODIFIED_TABLE_SCHEMA_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema unmodifiedTableSchema_; /** @@ -10283,25 +10587,20 @@ public final class MasterProcedureProtos { * optional .hbase.pb.TableSchema unmodified_table_schema = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getUnmodifiedTableSchema() { - return unmodifiedTableSchema_; + return unmodifiedTableSchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : unmodifiedTableSchema_; } /** * optional .hbase.pb.TableSchema unmodified_table_schema = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getUnmodifiedTableSchemaOrBuilder() { - return unmodifiedTableSchema_; + return unmodifiedTableSchema_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : unmodifiedTableSchema_; } - private void initFields() { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - columnfamilySchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); - unmodifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasUserInfo()) { memoizedIsInitialized = 0; @@ -10339,57 +10638,49 @@ public final class MasterProcedureProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, userInfo_); + output.writeMessage(1, getUserInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, tableName_); + output.writeMessage(2, getTableName()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeMessage(3, columnfamilySchema_); + output.writeMessage(3, getColumnfamilySchema()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeMessage(4, unmodifiedTableSchema_); + output.writeMessage(4, getUnmodifiedTableSchema()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += 
com.google.protobuf.CodedOutputStream - .computeMessageSize(1, userInfo_); + .computeMessageSize(1, getUserInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, tableName_); + .computeMessageSize(2, getTableName()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, columnfamilySchema_); + .computeMessageSize(3, getColumnfamilySchema()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(4, unmodifiedTableSchema_); + .computeMessageSize(4, getUnmodifiedTableSchema()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -10420,12 +10711,10 @@ public final class MasterProcedureProtos { result = result && getUnmodifiedTableSchema() .equals(other.getUnmodifiedTableSchema()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -10449,7 +10738,7 @@ public final class MasterProcedureProtos { hash = (37 * hash) + UNMODIFIED_TABLE_SCHEMA_FIELD_NUMBER; hash = (53 * hash) + getUnmodifiedTableSchema().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -10477,46 +10766,57 @@ public final class MasterProcedureProtos { } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -10524,14 +10824,15 @@ public final class MasterProcedureProtos { * Protobuf type {@code hbase.pb.ModifyColumnFamilyStateData} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateDataOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ModifyColumnFamilyStateData) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateDataOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_ModifyColumnFamilyStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + 
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_ModifyColumnFamilyStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -10544,44 +10845,41 @@ public final class MasterProcedureProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getUserInfoFieldBuilder(); getTableNameFieldBuilder(); getColumnfamilySchemaFieldBuilder(); getUnmodifiedTableSchemaFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (userInfoBuilder_ == null) { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + userInfo_ = null; } else { userInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); if (columnfamilySchemaBuilder_ == null) { - columnfamilySchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); + columnfamilySchema_ = null; } else { columnfamilySchemaBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); if (unmodifiedTableSchemaBuilder_ == null) { - unmodifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + unmodifiedTableSchema_ = null; } else { 
unmodifiedTableSchemaBuilder_.clear(); } @@ -10589,10 +10887,6 @@ public final class MasterProcedureProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_ModifyColumnFamilyStateData_descriptor; @@ -10651,6 +10945,32 @@ public final class MasterProcedureProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData)other); @@ -10674,38 +10994,32 @@ public final class MasterProcedureProtos { if (other.hasUnmodifiedTableSchema()) { mergeUnmodifiedTableSchema(other.getUnmodifiedTableSchema()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean 
isInitialized() { if (!hasUserInfo()) { - return false; } if (!hasTableName()) { - return false; } if (!hasColumnfamilySchema()) { - return false; } if (!getUserInfo().isInitialized()) { - return false; } if (!getTableName().isInitialized()) { - return false; } if (!getColumnfamilySchema().isInitialized()) { - return false; } if (hasUnmodifiedTableSchema()) { if (!getUnmodifiedTableSchema().isInitialized()) { - return false; } } @@ -10721,7 +11035,7 @@ public final class MasterProcedureProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -10731,9 +11045,8 @@ public final class MasterProcedureProtos { } private int bitField0_; - // required .hbase.pb.UserInformation user_info = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> userInfoBuilder_; /** * required .hbase.pb.UserInformation user_info = 1; @@ -10746,7 +11059,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() { if (userInfoBuilder_ == null) { - return userInfo_; + return userInfo_ == 
null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } else { return userInfoBuilder_.getMessage(); } @@ -10787,6 +11100,7 @@ public final class MasterProcedureProtos { public Builder mergeUserInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation value) { if (userInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + userInfo_ != null && userInfo_ != org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance()) { userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.newBuilder(userInfo_).mergeFrom(value).buildPartial(); @@ -10805,7 +11119,7 @@ public final class MasterProcedureProtos { */ public Builder clearUserInfo() { if (userInfoBuilder_ == null) { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + userInfo_ = null; onChanged(); } else { userInfoBuilder_.clear(); @@ -10828,19 +11142,20 @@ public final class MasterProcedureProtos { if (userInfoBuilder_ != null) { return userInfoBuilder_.getMessageOrBuilder(); } else { - return userInfo_; + return userInfo_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } } /** * required .hbase.pb.UserInformation user_info = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> getUserInfoFieldBuilder() { if (userInfoBuilder_ == null) { - userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder>( - userInfo_, + getUserInfo(), getParentForChildren(), isClean()); userInfo_ = null; @@ -10848,9 +11163,8 @@ public final class MasterProcedureProtos { return userInfoBuilder_; } - // required .hbase.pb.TableName table_name = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * required .hbase.pb.TableName table_name = 2; @@ -10863,7 +11177,7 @@ public final class MasterProcedureProtos { */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } @@ -10904,6 +11218,7 @@ public final class MasterProcedureProtos { public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); @@ -10922,7 +11237,7 @@ public final class MasterProcedureProtos { */ public Builder clearTableName() { if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); @@ -10945,19 +11260,20 @@ public final class MasterProcedureProtos { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { - return tableName_; + return tableName_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * required .hbase.pb.TableName table_name = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { - tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( - tableName_, + getTableName(), getParentForChildren(), isClean()); tableName_ = null; @@ -10965,9 +11281,8 @@ public final class MasterProcedureProtos { return tableNameBuilder_; } - // required .hbase.pb.ColumnFamilySchema columnfamily_schema = 3; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema columnfamilySchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema columnfamilySchema_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> columnfamilySchemaBuilder_; /** * required .hbase.pb.ColumnFamilySchema columnfamily_schema = 3; @@ -10980,7 
+11295,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnfamilySchema() { if (columnfamilySchemaBuilder_ == null) { - return columnfamilySchema_; + return columnfamilySchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance() : columnfamilySchema_; } else { return columnfamilySchemaBuilder_.getMessage(); } @@ -11021,6 +11336,7 @@ public final class MasterProcedureProtos { public Builder mergeColumnfamilySchema(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { if (columnfamilySchemaBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004) && + columnfamilySchema_ != null && columnfamilySchema_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()) { columnfamilySchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(columnfamilySchema_).mergeFrom(value).buildPartial(); @@ -11039,7 +11355,7 @@ public final class MasterProcedureProtos { */ public Builder clearColumnfamilySchema() { if (columnfamilySchemaBuilder_ == null) { - columnfamilySchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); + columnfamilySchema_ = null; onChanged(); } else { columnfamilySchemaBuilder_.clear(); @@ -11062,19 +11378,20 @@ public final class MasterProcedureProtos { if (columnfamilySchemaBuilder_ != null) { return columnfamilySchemaBuilder_.getMessageOrBuilder(); } else { - return columnfamilySchema_; + return columnfamilySchema_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance() : columnfamilySchema_; } } /** * required .hbase.pb.ColumnFamilySchema columnfamily_schema = 3; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> getColumnfamilySchemaFieldBuilder() { if (columnfamilySchemaBuilder_ == null) { - columnfamilySchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder< + columnfamilySchemaBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>( - columnfamilySchema_, + getColumnfamilySchema(), getParentForChildren(), isClean()); columnfamilySchema_ = null; @@ -11082,9 +11399,8 @@ public final class MasterProcedureProtos { return columnfamilySchemaBuilder_; } - // optional .hbase.pb.TableSchema unmodified_table_schema = 4; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema unmodifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema unmodifiedTableSchema_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> unmodifiedTableSchemaBuilder_; /** * optional .hbase.pb.TableSchema unmodified_table_schema = 4; @@ -11097,7 +11413,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getUnmodifiedTableSchema() { if (unmodifiedTableSchemaBuilder_ == null) { - return unmodifiedTableSchema_; + return unmodifiedTableSchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : unmodifiedTableSchema_; } else { return unmodifiedTableSchemaBuilder_.getMessage(); } @@ -11138,6 +11454,7 @@ public final class MasterProcedureProtos { public Builder mergeUnmodifiedTableSchema(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema value) { if (unmodifiedTableSchemaBuilder_ == null) { if (((bitField0_ & 0x00000008) == 0x00000008) && + unmodifiedTableSchema_ != null && unmodifiedTableSchema_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) { unmodifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.newBuilder(unmodifiedTableSchema_).mergeFrom(value).buildPartial(); @@ -11156,7 +11473,7 @@ public final class MasterProcedureProtos { */ public Builder clearUnmodifiedTableSchema() { if (unmodifiedTableSchemaBuilder_ == null) { - unmodifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + unmodifiedTableSchema_ = null; onChanged(); } else { unmodifiedTableSchemaBuilder_.clear(); @@ -11179,41 +11496,79 @@ public final class MasterProcedureProtos { if (unmodifiedTableSchemaBuilder_ != null) { return unmodifiedTableSchemaBuilder_.getMessageOrBuilder(); } else { - return unmodifiedTableSchema_; + return unmodifiedTableSchema_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : unmodifiedTableSchema_; } } /** * optional .hbase.pb.TableSchema unmodified_table_schema = 4; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> getUnmodifiedTableSchemaFieldBuilder() { if (unmodifiedTableSchemaBuilder_ == null) { - unmodifiedTableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder< + unmodifiedTableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>( - unmodifiedTableSchema_, + getUnmodifiedTableSchema(), getParentForChildren(), isClean()); unmodifiedTableSchema_ = null; } return unmodifiedTableSchemaBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ModifyColumnFamilyStateData) } + // @@protoc_insertion_point(class_scope:hbase.pb.ModifyColumnFamilyStateData) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData DEFAULT_INSTANCE; static { - defaultInstance = new ModifyColumnFamilyStateData(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ModifyColumnFamilyStateData parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ModifyColumnFamilyStateData(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ModifyColumnFamilyStateData) } - public interface DeleteColumnFamilyStateDataOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface DeleteColumnFamilyStateDataOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.DeleteColumnFamilyStateData) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.UserInformation user_info = 1; /** * required .hbase.pb.UserInformation user_info = 1; */ @@ -11227,7 +11582,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder(); - // required .hbase.pb.TableName table_name = 2; /** * required .hbase.pb.TableName table_name = 2; */ @@ -11241,7 +11595,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder 
getTableNameOrBuilder(); - // required bytes columnfamily_name = 3; /** * required bytes columnfamily_name = 3; */ @@ -11251,7 +11604,6 @@ public final class MasterProcedureProtos { */ com.google.protobuf.ByteString getColumnfamilyName(); - // optional .hbase.pb.TableSchema unmodified_table_schema = 4; /** * optional .hbase.pb.TableSchema unmodified_table_schema = 4; */ @@ -11268,36 +11620,28 @@ public final class MasterProcedureProtos { /** * Protobuf type {@code hbase.pb.DeleteColumnFamilyStateData} */ - public static final class DeleteColumnFamilyStateData extends - com.google.protobuf.GeneratedMessage - implements DeleteColumnFamilyStateDataOrBuilder { + public static final class DeleteColumnFamilyStateData extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.DeleteColumnFamilyStateData) + DeleteColumnFamilyStateDataOrBuilder { // Use DeleteColumnFamilyStateData.newBuilder() to construct. - private DeleteColumnFamilyStateData(com.google.protobuf.GeneratedMessage.Builder builder) { + private DeleteColumnFamilyStateData(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private DeleteColumnFamilyStateData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final DeleteColumnFamilyStateData defaultInstance; - public static DeleteColumnFamilyStateData getDefaultInstance() { - return defaultInstance; } - - public DeleteColumnFamilyStateData getDefaultInstanceForType() { - return defaultInstance; + private DeleteColumnFamilyStateData() { + columnfamilyName_ = com.google.protobuf.ByteString.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private DeleteColumnFamilyStateData( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -11366,7 +11710,7 @@ public final class MasterProcedureProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -11377,30 +11721,14 @@ public final class MasterProcedureProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_DeleteColumnFamilyStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_DeleteColumnFamilyStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public DeleteColumnFamilyStateData parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new DeleteColumnFamilyStateData(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private 
int bitField0_; - // required .hbase.pb.UserInformation user_info = 1; public static final int USER_INFO_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_; /** @@ -11413,16 +11741,15 @@ public final class MasterProcedureProtos { * required .hbase.pb.UserInformation user_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() { - return userInfo_; + return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } /** * required .hbase.pb.UserInformation user_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() { - return userInfo_; + return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } - // required .hbase.pb.TableName table_name = 2; public static final int TABLE_NAME_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** @@ -11435,16 +11762,15 @@ public final class MasterProcedureProtos { * required .hbase.pb.TableName table_name = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * required .hbase.pb.TableName table_name = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { - return tableName_; + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } - // required bytes columnfamily_name = 3; public static final int COLUMNFAMILY_NAME_FIELD_NUMBER = 3; private com.google.protobuf.ByteString columnfamilyName_; /** @@ -11460,7 +11786,6 @@ public final class MasterProcedureProtos { return columnfamilyName_; } - // optional .hbase.pb.TableSchema unmodified_table_schema = 4; public static final int UNMODIFIED_TABLE_SCHEMA_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema unmodifiedTableSchema_; /** @@ -11473,25 +11798,20 @@ public final class MasterProcedureProtos { * optional .hbase.pb.TableSchema unmodified_table_schema = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getUnmodifiedTableSchema() { - return unmodifiedTableSchema_; + return unmodifiedTableSchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : unmodifiedTableSchema_; } /** * optional .hbase.pb.TableSchema unmodified_table_schema = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getUnmodifiedTableSchemaOrBuilder() { - return unmodifiedTableSchema_; + return unmodifiedTableSchema_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : unmodifiedTableSchema_; } - private void initFields() { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - columnfamilyName_ = com.google.protobuf.ByteString.EMPTY; - unmodifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasUserInfo()) { memoizedIsInitialized = 0; @@ -11525,35 +11845,33 @@ public final class MasterProcedureProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, userInfo_); + output.writeMessage(1, getUserInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, tableName_); + output.writeMessage(2, getTableName()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBytes(3, columnfamilyName_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeMessage(4, unmodifiedTableSchema_); + output.writeMessage(4, getUnmodifiedTableSchema()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, userInfo_); + .computeMessageSize(1, getUserInfo()); } if 
(((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, tableName_); + .computeMessageSize(2, getTableName()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream @@ -11561,21 +11879,15 @@ public final class MasterProcedureProtos { } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(4, unmodifiedTableSchema_); + .computeMessageSize(4, getUnmodifiedTableSchema()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -11606,12 +11918,10 @@ public final class MasterProcedureProtos { result = result && getUnmodifiedTableSchema() .equals(other.getUnmodifiedTableSchema()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -11635,7 +11945,7 @@ public final class MasterProcedureProtos { hash = (37 * hash) + UNMODIFIED_TABLE_SCHEMA_FIELD_NUMBER; hash = (53 * hash) + getUnmodifiedTableSchema().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -11663,46 +11973,57 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData parseFrom(java.io.InputStream input) throws 
java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -11710,14 +12031,15 @@ public final class MasterProcedureProtos { * Protobuf type {@code hbase.pb.DeleteColumnFamilyStateData} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateDataOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.DeleteColumnFamilyStateData) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateDataOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_DeleteColumnFamilyStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_DeleteColumnFamilyStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -11730,31 +12052,28 @@ public final class MasterProcedureProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getUserInfoFieldBuilder(); getTableNameFieldBuilder(); getUnmodifiedTableSchemaFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (userInfoBuilder_ == null) { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + userInfo_ = null; } else { userInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; } else { tableNameBuilder_.clear(); } @@ -11762,7 +12081,7 @@ public final class MasterProcedureProtos { columnfamilyName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); if (unmodifiedTableSchemaBuilder_ == null) { - unmodifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + unmodifiedTableSchema_ = null; } else { unmodifiedTableSchemaBuilder_.clear(); } @@ -11770,10 +12089,6 @@ public final class MasterProcedureProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_DeleteColumnFamilyStateData_descriptor; @@ -11828,6 +12143,32 @@ public final class MasterProcedureProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData)other); @@ -11851,34 +12192,29 @@ public final class MasterProcedureProtos { if (other.hasUnmodifiedTableSchema()) { mergeUnmodifiedTableSchema(other.getUnmodifiedTableSchema()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasUserInfo()) { - return false; } if (!hasTableName()) { - return false; } if (!hasColumnfamilyName()) { - return false; } if (!getUserInfo().isInitialized()) { - return false; } if (!getTableName().isInitialized()) { - return false; } if 
(hasUnmodifiedTableSchema()) { if (!getUnmodifiedTableSchema().isInitialized()) { - return false; } } @@ -11894,7 +12230,7 @@ public final class MasterProcedureProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -11904,9 +12240,8 @@ public final class MasterProcedureProtos { } private int bitField0_; - // required .hbase.pb.UserInformation user_info = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> userInfoBuilder_; /** * required .hbase.pb.UserInformation user_info = 1; @@ -11919,7 +12254,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() { if (userInfoBuilder_ == null) { - return userInfo_; + return userInfo_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } else { return userInfoBuilder_.getMessage(); } @@ -11960,6 +12295,7 @@ public final class MasterProcedureProtos { public Builder mergeUserInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation value) { if (userInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + userInfo_ != null && userInfo_ != org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance()) { userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.newBuilder(userInfo_).mergeFrom(value).buildPartial(); @@ -11978,7 +12314,7 @@ public final class MasterProcedureProtos { */ public Builder clearUserInfo() { if (userInfoBuilder_ == null) { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + userInfo_ = null; onChanged(); } else { userInfoBuilder_.clear(); @@ -12001,19 +12337,20 @@ public final class MasterProcedureProtos { if (userInfoBuilder_ != null) { return userInfoBuilder_.getMessageOrBuilder(); } else { - return userInfo_; + return userInfo_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } } /** * required .hbase.pb.UserInformation user_info = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> getUserInfoFieldBuilder() { if (userInfoBuilder_ == null) { - userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder>( - userInfo_, + getUserInfo(), getParentForChildren(), isClean()); userInfo_ = null; @@ -12021,9 +12358,8 @@ public final class MasterProcedureProtos { return userInfoBuilder_; } - // required .hbase.pb.TableName table_name = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * required .hbase.pb.TableName table_name = 2; @@ -12036,7 +12372,7 @@ public final class MasterProcedureProtos { */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } @@ -12077,6 +12413,7 @@ public final class MasterProcedureProtos { public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); @@ -12095,7 +12432,7 @@ public final class MasterProcedureProtos { */ public Builder clearTableName() { if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); @@ -12118,19 +12455,20 @@ public final class MasterProcedureProtos { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { - return tableName_; + return tableName_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * required .hbase.pb.TableName table_name = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { - tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( - tableName_, + getTableName(), getParentForChildren(), isClean()); tableName_ = null; @@ -12138,7 +12476,6 @@ public final class MasterProcedureProtos { return tableNameBuilder_; } - // required bytes columnfamily_name = 3; private com.google.protobuf.ByteString columnfamilyName_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes columnfamily_name = 3; @@ -12174,9 +12511,8 @@ public final class MasterProcedureProtos { return this; } - // optional .hbase.pb.TableSchema unmodified_table_schema = 4; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema unmodifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema unmodifiedTableSchema_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> unmodifiedTableSchemaBuilder_; /** * optional .hbase.pb.TableSchema unmodified_table_schema = 4; @@ -12189,7 +12525,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getUnmodifiedTableSchema() { if (unmodifiedTableSchemaBuilder_ == null) { - return unmodifiedTableSchema_; + return unmodifiedTableSchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : unmodifiedTableSchema_; } else { return unmodifiedTableSchemaBuilder_.getMessage(); } @@ -12230,6 +12566,7 @@ public final class MasterProcedureProtos { public Builder mergeUnmodifiedTableSchema(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema value) { if (unmodifiedTableSchemaBuilder_ == null) { if (((bitField0_ & 0x00000008) == 0x00000008) && + unmodifiedTableSchema_ != null && unmodifiedTableSchema_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) { unmodifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.newBuilder(unmodifiedTableSchema_).mergeFrom(value).buildPartial(); @@ -12248,7 +12585,7 @@ public final class MasterProcedureProtos { */ public Builder clearUnmodifiedTableSchema() { if (unmodifiedTableSchemaBuilder_ == null) { - unmodifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + unmodifiedTableSchema_ = null; onChanged(); } else { unmodifiedTableSchemaBuilder_.clear(); @@ -12271,41 +12608,79 @@ public final class MasterProcedureProtos { if (unmodifiedTableSchemaBuilder_ != null) { return unmodifiedTableSchemaBuilder_.getMessageOrBuilder(); } else { - return unmodifiedTableSchema_; + return unmodifiedTableSchema_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : unmodifiedTableSchema_; } } /** * optional .hbase.pb.TableSchema unmodified_table_schema = 4; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> getUnmodifiedTableSchemaFieldBuilder() { if (unmodifiedTableSchemaBuilder_ == null) { - unmodifiedTableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder< + unmodifiedTableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>( - unmodifiedTableSchema_, + getUnmodifiedTableSchema(), getParentForChildren(), isClean()); unmodifiedTableSchema_ = null; } return unmodifiedTableSchemaBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.DeleteColumnFamilyStateData) } + // @@protoc_insertion_point(class_scope:hbase.pb.DeleteColumnFamilyStateData) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData DEFAULT_INSTANCE; static { - defaultInstance = new DeleteColumnFamilyStateData(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public DeleteColumnFamilyStateData parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DeleteColumnFamilyStateData(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.DeleteColumnFamilyStateData) } - public interface EnableTableStateDataOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface EnableTableStateDataOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.EnableTableStateData) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.UserInformation user_info = 1; /** * required .hbase.pb.UserInformation user_info = 1; */ @@ -12319,7 +12694,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder(); - // required .hbase.pb.TableName table_name = 2; /** * required .hbase.pb.TableName table_name = 2; */ @@ -12333,7 +12707,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); - // 
required bool skip_table_state_check = 3; /** * required bool skip_table_state_check = 3; */ @@ -12346,36 +12719,28 @@ public final class MasterProcedureProtos { /** * Protobuf type {@code hbase.pb.EnableTableStateData} */ - public static final class EnableTableStateData extends - com.google.protobuf.GeneratedMessage - implements EnableTableStateDataOrBuilder { + public static final class EnableTableStateData extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.EnableTableStateData) + EnableTableStateDataOrBuilder { // Use EnableTableStateData.newBuilder() to construct. - private EnableTableStateData(com.google.protobuf.GeneratedMessage.Builder builder) { + private EnableTableStateData(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private EnableTableStateData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final EnableTableStateData defaultInstance; - public static EnableTableStateData getDefaultInstance() { - return defaultInstance; - } - - public EnableTableStateData getDefaultInstanceForType() { - return defaultInstance; + private EnableTableStateData() { + skipTableStateCheck_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private EnableTableStateData( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -12431,7 +12796,7 @@ public final class MasterProcedureProtos { throw e.setUnfinishedMessage(this); 
} catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -12442,30 +12807,14 @@ public final class MasterProcedureProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_EnableTableStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_EnableTableStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.EnableTableStateData.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.EnableTableStateData.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public EnableTableStateData parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new EnableTableStateData(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.UserInformation user_info = 1; public static final int USER_INFO_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_; /** @@ -12478,16 +12827,15 @@ public final class MasterProcedureProtos { * required .hbase.pb.UserInformation user_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() { - return userInfo_; + return 
userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } /** * required .hbase.pb.UserInformation user_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() { - return userInfo_; + return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } - // required .hbase.pb.TableName table_name = 2; public static final int TABLE_NAME_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** @@ -12500,16 +12848,15 @@ public final class MasterProcedureProtos { * required .hbase.pb.TableName table_name = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * required .hbase.pb.TableName table_name = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { - return tableName_; + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } - // required bool skip_table_state_check = 3; public static final int SKIP_TABLE_STATE_CHECK_FIELD_NUMBER = 3; private boolean skipTableStateCheck_; /** @@ -12525,15 +12872,11 @@ public final class MasterProcedureProtos { return skipTableStateCheck_; } - private void initFields() { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - skipTableStateCheck_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasUserInfo()) { memoizedIsInitialized = 0; @@ -12561,50 +12904,42 @@ public final class MasterProcedureProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, userInfo_); + output.writeMessage(1, getUserInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, tableName_); + output.writeMessage(2, getTableName()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBool(3, skipTableStateCheck_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, userInfo_); + .computeMessageSize(1, getUserInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += 
com.google.protobuf.CodedOutputStream - .computeMessageSize(2, tableName_); + .computeMessageSize(2, getTableName()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(3, skipTableStateCheck_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -12630,12 +12965,10 @@ public final class MasterProcedureProtos { result = result && (getSkipTableStateCheck() == other.getSkipTableStateCheck()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -12653,9 +12986,10 @@ public final class MasterProcedureProtos { } if (hasSkipTableStateCheck()) { hash = (37 * hash) + SKIP_TABLE_STATE_CHECK_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getSkipTableStateCheck()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getSkipTableStateCheck()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -12683,46 +13017,57 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.EnableTableStateData parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.EnableTableStateData parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.EnableTableStateData parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.EnableTableStateData parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.EnableTableStateData parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.EnableTableStateData parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public 
static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.EnableTableStateData prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -12730,14 +13075,15 @@ public final class MasterProcedureProtos { * Protobuf type {@code hbase.pb.EnableTableStateData} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.EnableTableStateDataOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.EnableTableStateData) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.EnableTableStateDataOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_EnableTableStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_EnableTableStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -12750,30 +13096,27 @@ public final class MasterProcedureProtos { } private Builder( - 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getUserInfoFieldBuilder(); getTableNameFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (userInfoBuilder_ == null) { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + userInfo_ = null; } else { userInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; } else { tableNameBuilder_.clear(); } @@ -12783,10 +13126,6 @@ public final class MasterProcedureProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_EnableTableStateData_descriptor; @@ -12833,6 +13172,32 @@ public final class MasterProcedureProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + 
com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.EnableTableStateData) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.EnableTableStateData)other); @@ -12853,29 +13218,25 @@ public final class MasterProcedureProtos { if (other.hasSkipTableStateCheck()) { setSkipTableStateCheck(other.getSkipTableStateCheck()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasUserInfo()) { - return false; } if (!hasTableName()) { - return false; } if (!hasSkipTableStateCheck()) { - return false; } if (!getUserInfo().isInitialized()) { - return false; } if (!getTableName().isInitialized()) { - return false; } return true; @@ -12890,7 +13251,7 @@ public final class MasterProcedureProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.EnableTableStateData) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -12900,9 +13261,8 @@ public final class MasterProcedureProtos { } private int bitField0_; - // required .hbase.pb.UserInformation user_info = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_ = 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> userInfoBuilder_; /** * required .hbase.pb.UserInformation user_info = 1; @@ -12915,7 +13275,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() { if (userInfoBuilder_ == null) { - return userInfo_; + return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } else { return userInfoBuilder_.getMessage(); } @@ -12956,6 +13316,7 @@ public final class MasterProcedureProtos { public Builder mergeUserInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation value) { if (userInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + userInfo_ != null && userInfo_ != org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance()) { userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.newBuilder(userInfo_).mergeFrom(value).buildPartial(); @@ -12974,7 +13335,7 @@ public final class MasterProcedureProtos { */ public Builder clearUserInfo() { if (userInfoBuilder_ == null) { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + userInfo_ = null; onChanged(); } else { userInfoBuilder_.clear(); @@ -12997,19 +13358,20 @@ public final class MasterProcedureProtos { if (userInfoBuilder_ != null) { return 
userInfoBuilder_.getMessageOrBuilder(); } else { - return userInfo_; + return userInfo_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } } /** * required .hbase.pb.UserInformation user_info = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> getUserInfoFieldBuilder() { if (userInfoBuilder_ == null) { - userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder>( - userInfo_, + getUserInfo(), getParentForChildren(), isClean()); userInfo_ = null; @@ -13017,9 +13379,8 @@ public final class MasterProcedureProtos { return userInfoBuilder_; } - // required .hbase.pb.TableName table_name = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * required .hbase.pb.TableName table_name = 2; @@ 
-13032,7 +13393,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } @@ -13073,6 +13434,7 @@ public final class MasterProcedureProtos { public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); @@ -13091,7 +13453,7 @@ public final class MasterProcedureProtos { */ public Builder clearTableName() { if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); @@ -13114,19 +13476,20 @@ public final class MasterProcedureProtos { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { - return tableName_; + return tableName_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * required .hbase.pb.TableName table_name = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { - tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( - tableName_, + getTableName(), getParentForChildren(), isClean()); tableName_ = null; @@ -13134,7 +13497,6 @@ public final class MasterProcedureProtos { return tableNameBuilder_; } - // required bool skip_table_state_check = 3; private boolean skipTableStateCheck_ ; /** * required bool skip_table_state_check = 3; @@ -13166,22 +13528,59 @@ public final class MasterProcedureProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.EnableTableStateData) } + // @@protoc_insertion_point(class_scope:hbase.pb.EnableTableStateData) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.EnableTableStateData DEFAULT_INSTANCE; static { - defaultInstance = new 
EnableTableStateData(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.EnableTableStateData(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.EnableTableStateData getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public EnableTableStateData parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new EnableTableStateData(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.EnableTableStateData getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.EnableTableStateData) } - public interface DisableTableStateDataOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface DisableTableStateDataOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.DisableTableStateData) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.UserInformation user_info = 1; /** * required .hbase.pb.UserInformation user_info = 1; */ @@ -13195,7 +13594,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder(); - // required .hbase.pb.TableName table_name = 2; /** * required .hbase.pb.TableName table_name = 2; */ @@ -13209,7 +13607,6 @@ public final class MasterProcedureProtos { */ 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); - // required bool skip_table_state_check = 3; /** * required bool skip_table_state_check = 3; */ @@ -13222,36 +13619,28 @@ public final class MasterProcedureProtos { /** * Protobuf type {@code hbase.pb.DisableTableStateData} */ - public static final class DisableTableStateData extends - com.google.protobuf.GeneratedMessage - implements DisableTableStateDataOrBuilder { + public static final class DisableTableStateData extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.DisableTableStateData) + DisableTableStateDataOrBuilder { // Use DisableTableStateData.newBuilder() to construct. - private DisableTableStateData(com.google.protobuf.GeneratedMessage.Builder builder) { + private DisableTableStateData(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private DisableTableStateData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final DisableTableStateData defaultInstance; - public static DisableTableStateData getDefaultInstance() { - return defaultInstance; } - - public DisableTableStateData getDefaultInstanceForType() { - return defaultInstance; + private DisableTableStateData() { + skipTableStateCheck_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private DisableTableStateData( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = 
com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -13307,7 +13696,7 @@ public final class MasterProcedureProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -13318,30 +13707,14 @@ public final class MasterProcedureProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_DisableTableStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_DisableTableStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DisableTableStateData.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DisableTableStateData.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public DisableTableStateData parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new DisableTableStateData(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.UserInformation user_info = 1; public static final int USER_INFO_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_; /** @@ -13354,16 +13727,15 @@ public final class MasterProcedureProtos { * required 
.hbase.pb.UserInformation user_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() { - return userInfo_; + return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } /** * required .hbase.pb.UserInformation user_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() { - return userInfo_; + return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } - // required .hbase.pb.TableName table_name = 2; public static final int TABLE_NAME_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** @@ -13376,16 +13748,15 @@ public final class MasterProcedureProtos { * required .hbase.pb.TableName table_name = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * required .hbase.pb.TableName table_name = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { - return tableName_; + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } - // required bool skip_table_state_check = 3; public static final int SKIP_TABLE_STATE_CHECK_FIELD_NUMBER = 3; private boolean skipTableStateCheck_; /** @@ -13401,15 +13772,11 @@ public final class MasterProcedureProtos { return skipTableStateCheck_; } - private void initFields() { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - skipTableStateCheck_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasUserInfo()) { memoizedIsInitialized = 0; @@ -13437,50 +13804,42 @@ public final class MasterProcedureProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, userInfo_); + output.writeMessage(1, getUserInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, tableName_); + output.writeMessage(2, getTableName()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBool(3, skipTableStateCheck_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, userInfo_); + .computeMessageSize(1, getUserInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += 
com.google.protobuf.CodedOutputStream - .computeMessageSize(2, tableName_); + .computeMessageSize(2, getTableName()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(3, skipTableStateCheck_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -13506,12 +13865,10 @@ public final class MasterProcedureProtos { result = result && (getSkipTableStateCheck() == other.getSkipTableStateCheck()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -13529,9 +13886,10 @@ public final class MasterProcedureProtos { } if (hasSkipTableStateCheck()) { hash = (37 * hash) + SKIP_TABLE_STATE_CHECK_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getSkipTableStateCheck()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getSkipTableStateCheck()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -13559,46 +13917,57 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DisableTableStateData parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DisableTableStateData parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DisableTableStateData parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DisableTableStateData parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DisableTableStateData parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DisableTableStateData parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public 
static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DisableTableStateData prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -13606,14 +13975,15 @@ public final class MasterProcedureProtos { * Protobuf type {@code hbase.pb.DisableTableStateData} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DisableTableStateDataOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.DisableTableStateData) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DisableTableStateDataOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_DisableTableStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_DisableTableStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -13626,30 +13996,27 @@ public final class MasterProcedureProtos { } private Builder( - 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getUserInfoFieldBuilder(); getTableNameFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (userInfoBuilder_ == null) { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + userInfo_ = null; } else { userInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; } else { tableNameBuilder_.clear(); } @@ -13659,10 +14026,6 @@ public final class MasterProcedureProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_DisableTableStateData_descriptor; @@ -13709,6 +14072,32 @@ public final class MasterProcedureProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + 
com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DisableTableStateData) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DisableTableStateData)other); @@ -13729,29 +14118,25 @@ public final class MasterProcedureProtos { if (other.hasSkipTableStateCheck()) { setSkipTableStateCheck(other.getSkipTableStateCheck()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasUserInfo()) { - return false; } if (!hasTableName()) { - return false; } if (!hasSkipTableStateCheck()) { - return false; } if (!getUserInfo().isInitialized()) { - return false; } if (!getTableName().isInitialized()) { - return false; } return true; @@ -13766,7 +14151,7 @@ public final class MasterProcedureProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DisableTableStateData) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -13776,9 +14161,8 @@ public final class MasterProcedureProtos { } private int bitField0_; - // required .hbase.pb.UserInformation user_info = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_ = 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> userInfoBuilder_; /** * required .hbase.pb.UserInformation user_info = 1; @@ -13791,7 +14175,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() { if (userInfoBuilder_ == null) { - return userInfo_; + return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } else { return userInfoBuilder_.getMessage(); } @@ -13832,6 +14216,7 @@ public final class MasterProcedureProtos { public Builder mergeUserInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation value) { if (userInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + userInfo_ != null && userInfo_ != org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance()) { userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.newBuilder(userInfo_).mergeFrom(value).buildPartial(); @@ -13850,7 +14235,7 @@ public final class MasterProcedureProtos { */ public Builder clearUserInfo() { if (userInfoBuilder_ == null) { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + userInfo_ = null; onChanged(); } else { userInfoBuilder_.clear(); @@ -13873,19 +14258,20 @@ public final class MasterProcedureProtos { if (userInfoBuilder_ != null) { return 
userInfoBuilder_.getMessageOrBuilder(); } else { - return userInfo_; + return userInfo_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } } /** * required .hbase.pb.UserInformation user_info = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> getUserInfoFieldBuilder() { if (userInfoBuilder_ == null) { - userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder>( - userInfo_, + getUserInfo(), getParentForChildren(), isClean()); userInfo_ = null; @@ -13893,9 +14279,8 @@ public final class MasterProcedureProtos { return userInfoBuilder_; } - // required .hbase.pb.TableName table_name = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * required .hbase.pb.TableName table_name = 2; @@ 
-13908,7 +14293,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } @@ -13949,6 +14334,7 @@ public final class MasterProcedureProtos { public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); @@ -13967,7 +14353,7 @@ public final class MasterProcedureProtos { */ public Builder clearTableName() { if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); @@ -13990,19 +14376,20 @@ public final class MasterProcedureProtos { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { - return tableName_; + return tableName_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * required .hbase.pb.TableName table_name = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { - tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( - tableName_, + getTableName(), getParentForChildren(), isClean()); tableName_ = null; @@ -14010,7 +14397,6 @@ public final class MasterProcedureProtos { return tableNameBuilder_; } - // required bool skip_table_state_check = 3; private boolean skipTableStateCheck_ ; /** * required bool skip_table_state_check = 3; @@ -14042,22 +14428,59 @@ public final class MasterProcedureProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.DisableTableStateData) } + // @@protoc_insertion_point(class_scope:hbase.pb.DisableTableStateData) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DisableTableStateData DEFAULT_INSTANCE; static { - defaultInstance = new 
DisableTableStateData(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DisableTableStateData(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DisableTableStateData getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public DisableTableStateData parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DisableTableStateData(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DisableTableStateData getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.DisableTableStateData) } - public interface RestoreParentToChildRegionsPairOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RestoreParentToChildRegionsPairOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RestoreParentToChildRegionsPair) + com.google.protobuf.MessageOrBuilder { - // required string parent_region_name = 1; /** * required string parent_region_name = 1; */ @@ -14072,7 +14495,6 @@ public final class MasterProcedureProtos { com.google.protobuf.ByteString getParentRegionNameBytes(); - // required string child1_region_name = 2; /** * required string child1_region_name = 2; */ @@ -14087,7 +14509,6 @@ public final class MasterProcedureProtos { com.google.protobuf.ByteString getChild1RegionNameBytes(); - // required string child2_region_name = 3; /** * required 
string child2_region_name = 3; */ @@ -14105,36 +14526,30 @@ public final class MasterProcedureProtos { /** * Protobuf type {@code hbase.pb.RestoreParentToChildRegionsPair} */ - public static final class RestoreParentToChildRegionsPair extends - com.google.protobuf.GeneratedMessage - implements RestoreParentToChildRegionsPairOrBuilder { + public static final class RestoreParentToChildRegionsPair extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RestoreParentToChildRegionsPair) + RestoreParentToChildRegionsPairOrBuilder { // Use RestoreParentToChildRegionsPair.newBuilder() to construct. - private RestoreParentToChildRegionsPair(com.google.protobuf.GeneratedMessage.Builder builder) { + private RestoreParentToChildRegionsPair(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private RestoreParentToChildRegionsPair(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RestoreParentToChildRegionsPair defaultInstance; - public static RestoreParentToChildRegionsPair getDefaultInstance() { - return defaultInstance; } - - public RestoreParentToChildRegionsPair getDefaultInstanceForType() { - return defaultInstance; + private RestoreParentToChildRegionsPair() { + parentRegionName_ = ""; + child1RegionName_ = ""; + child2RegionName_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RestoreParentToChildRegionsPair( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = 
com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -14154,18 +14569,21 @@ public final class MasterProcedureProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - parentRegionName_ = input.readBytes(); + parentRegionName_ = bs; break; } case 18: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; - child1RegionName_ = input.readBytes(); + child1RegionName_ = bs; break; } case 26: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000004; - child2RegionName_ = input.readBytes(); + child2RegionName_ = bs; break; } } @@ -14174,7 +14592,7 @@ public final class MasterProcedureProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -14185,32 +14603,16 @@ public final class MasterProcedureProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_RestoreParentToChildRegionsPair_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_RestoreParentToChildRegionsPair_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RestoreParentToChildRegionsPair parsePartialFrom( - 
com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RestoreParentToChildRegionsPair(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string parent_region_name = 1; public static final int PARENT_REGION_NAME_FIELD_NUMBER = 1; - private java.lang.Object parentRegionName_; + private volatile java.lang.Object parentRegionName_; /** * required string parent_region_name = 1; */ @@ -14251,9 +14653,8 @@ public final class MasterProcedureProtos { } } - // required string child1_region_name = 2; public static final int CHILD1_REGION_NAME_FIELD_NUMBER = 2; - private java.lang.Object child1RegionName_; + private volatile java.lang.Object child1RegionName_; /** * required string child1_region_name = 2; */ @@ -14294,9 +14695,8 @@ public final class MasterProcedureProtos { } } - // required string child2_region_name = 3; public static final int CHILD2_REGION_NAME_FIELD_NUMBER = 3; - private java.lang.Object child2RegionName_; + private volatile java.lang.Object child2RegionName_; /** * required string child2_region_name = 3; */ @@ -14337,15 +14737,11 @@ public final class MasterProcedureProtos { } } - private void initFields() { - parentRegionName_ = ""; - child1RegionName_ = ""; - child2RegionName_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasParentRegionName()) { memoizedIsInitialized = 0; @@ -14365,50 +14761,39 @@ public final class MasterProcedureProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 
0x00000001) == 0x00000001)) { - output.writeBytes(1, getParentRegionNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parentRegionName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getChild1RegionNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, child1RegionName_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, getChild2RegionNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, child2RegionName_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getParentRegionNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parentRegionName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getChild1RegionNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, child1RegionName_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, getChild2RegionNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, child2RegionName_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -14434,12 
+14819,10 @@ public final class MasterProcedureProtos { result = result && getChild2RegionName() .equals(other.getChild2RegionName()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -14459,7 +14842,7 @@ public final class MasterProcedureProtos { hash = (37 * hash) + CHILD2_REGION_NAME_FIELD_NUMBER; hash = (53 * hash) + getChild2RegionName().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -14487,46 +14870,57 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair parseDelimitedFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -14534,14 +14928,15 @@ public final class MasterProcedureProtos { * Protobuf type {@code hbase.pb.RestoreParentToChildRegionsPair} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPairOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RestoreParentToChildRegionsPair) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPairOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_RestoreParentToChildRegionsPair_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_RestoreParentToChildRegionsPair_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -14554,18 +14949,15 @@ public final class MasterProcedureProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + 
.alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); parentRegionName_ = ""; @@ -14577,10 +14969,6 @@ public final class MasterProcedureProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_RestoreParentToChildRegionsPair_descriptor; @@ -14619,6 +15007,32 @@ public final class MasterProcedureProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair)other); @@ -14645,21 +15059,19 @@ public final class MasterProcedureProtos { child2RegionName_ = other.child2RegionName_; onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + 
this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasParentRegionName()) { - return false; } if (!hasChild1RegionName()) { - return false; } if (!hasChild2RegionName()) { - return false; } return true; @@ -14674,7 +15086,7 @@ public final class MasterProcedureProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -14684,7 +15096,6 @@ public final class MasterProcedureProtos { } private int bitField0_; - // required string parent_region_name = 1; private java.lang.Object parentRegionName_ = ""; /** * required string parent_region_name = 1; @@ -14698,9 +15109,12 @@ public final class MasterProcedureProtos { public java.lang.String getParentRegionName() { java.lang.Object ref = parentRegionName_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - parentRegionName_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + parentRegionName_ = s; + } return s; } else { return (java.lang.String) ref; @@ -14758,7 +15172,6 @@ public final class MasterProcedureProtos { return this; } - // required string child1_region_name = 2; private java.lang.Object child1RegionName_ = ""; /** * required string child1_region_name = 2; @@ -14772,9 +15185,12 @@ public final class MasterProcedureProtos { public java.lang.String getChild1RegionName() { java.lang.Object ref = child1RegionName_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - 
child1RegionName_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + child1RegionName_ = s; + } return s; } else { return (java.lang.String) ref; @@ -14832,7 +15248,6 @@ public final class MasterProcedureProtos { return this; } - // required string child2_region_name = 3; private java.lang.Object child2RegionName_ = ""; /** * required string child2_region_name = 3; @@ -14846,9 +15261,12 @@ public final class MasterProcedureProtos { public java.lang.String getChild2RegionName() { java.lang.Object ref = child2RegionName_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - child2RegionName_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + child2RegionName_ = s; + } return s; } else { return (java.lang.String) ref; @@ -14905,22 +15323,59 @@ public final class MasterProcedureProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RestoreParentToChildRegionsPair) } + // @@protoc_insertion_point(class_scope:hbase.pb.RestoreParentToChildRegionsPair) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair DEFAULT_INSTANCE; static { - defaultInstance = new RestoreParentToChildRegionsPair(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair(); + } + + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RestoreParentToChildRegionsPair parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RestoreParentToChildRegionsPair(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.RestoreParentToChildRegionsPair) } - public interface CloneSnapshotStateDataOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface CloneSnapshotStateDataOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.CloneSnapshotStateData) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.UserInformation user_info = 1; /** * required .hbase.pb.UserInformation user_info = 1; */ @@ -14934,7 +15389,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder(); - // required .hbase.pb.SnapshotDescription snapshot = 2; /** * required .hbase.pb.SnapshotDescription snapshot = 2; */ @@ -14948,7 +15402,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder(); - // required .hbase.pb.TableSchema table_schema = 3; /** * required 
.hbase.pb.TableSchema table_schema = 3; */ @@ -14962,7 +15415,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder(); - // repeated .hbase.pb.RegionInfo region_info = 4; /** * repeated .hbase.pb.RegionInfo region_info = 4; */ @@ -14987,7 +15439,6 @@ public final class MasterProcedureProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder( int index); - // repeated .hbase.pb.RestoreParentToChildRegionsPair parent_to_child_regions_pair_list = 5; /** * repeated .hbase.pb.RestoreParentToChildRegionsPair parent_to_child_regions_pair_list = 5; */ @@ -15015,36 +15466,29 @@ public final class MasterProcedureProtos { /** * Protobuf type {@code hbase.pb.CloneSnapshotStateData} */ - public static final class CloneSnapshotStateData extends - com.google.protobuf.GeneratedMessage - implements CloneSnapshotStateDataOrBuilder { + public static final class CloneSnapshotStateData extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.CloneSnapshotStateData) + CloneSnapshotStateDataOrBuilder { // Use CloneSnapshotStateData.newBuilder() to construct. 
- private CloneSnapshotStateData(com.google.protobuf.GeneratedMessage.Builder builder) { + private CloneSnapshotStateData(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private CloneSnapshotStateData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final CloneSnapshotStateData defaultInstance; - public static CloneSnapshotStateData getDefaultInstance() { - return defaultInstance; - } - - public CloneSnapshotStateData getDefaultInstanceForType() { - return defaultInstance; + private CloneSnapshotStateData() { + regionInfo_ = java.util.Collections.emptyList(); + parentToChildRegionsPairList_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private CloneSnapshotStateData( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -15107,7 +15551,8 @@ public final class MasterProcedureProtos { regionInfo_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000008; } - regionInfo_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry)); + regionInfo_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry)); break; } case 42: { @@ -15115,7 +15560,8 @@ public final class MasterProcedureProtos { parentToChildRegionsPairList_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000010; } - 
parentToChildRegionsPairList_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair.PARSER, extensionRegistry)); + parentToChildRegionsPairList_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair.PARSER, extensionRegistry)); break; } } @@ -15124,7 +15570,7 @@ public final class MasterProcedureProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_); @@ -15141,30 +15587,14 @@ public final class MasterProcedureProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_CloneSnapshotStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_CloneSnapshotStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CloneSnapshotStateData.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CloneSnapshotStateData.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CloneSnapshotStateData parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CloneSnapshotStateData(input, extensionRegistry); - } - }; - - @java.lang.Override - public 
com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.UserInformation user_info = 1; public static final int USER_INFO_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_; /** @@ -15177,16 +15607,15 @@ public final class MasterProcedureProtos { * required .hbase.pb.UserInformation user_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() { - return userInfo_; + return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } /** * required .hbase.pb.UserInformation user_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() { - return userInfo_; + return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } - // required .hbase.pb.SnapshotDescription snapshot = 2; public static final int SNAPSHOT_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_; /** @@ -15199,16 +15628,15 @@ public final class MasterProcedureProtos { * required .hbase.pb.SnapshotDescription snapshot = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { - return snapshot_; + return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } /** * required .hbase.pb.SnapshotDescription snapshot = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { - return snapshot_; + return snapshot_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } - // required .hbase.pb.TableSchema table_schema = 3; public static final int TABLE_SCHEMA_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema tableSchema_; /** @@ -15221,16 +15649,15 @@ public final class MasterProcedureProtos { * required .hbase.pb.TableSchema table_schema = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { - return tableSchema_; + return tableSchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : tableSchema_; } /** * required .hbase.pb.TableSchema table_schema = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() { - return tableSchema_; + return tableSchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : tableSchema_; } - // repeated .hbase.pb.RegionInfo region_info = 4; public static final int REGION_INFO_FIELD_NUMBER = 4; private java.util.List regionInfo_; /** @@ -15266,7 +15693,6 @@ public final class MasterProcedureProtos { return regionInfo_.get(index); } - // repeated .hbase.pb.RestoreParentToChildRegionsPair parent_to_child_regions_pair_list = 5; public static final int PARENT_TO_CHILD_REGIONS_PAIR_LIST_FIELD_NUMBER = 5; private java.util.List parentToChildRegionsPairList_; /** @@ -15302,17 +15728,11 @@ public final class MasterProcedureProtos { return parentToChildRegionsPairList_.get(index); } - private void initFields() { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); - tableSchema_ = 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - regionInfo_ = java.util.Collections.emptyList(); - parentToChildRegionsPairList_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasUserInfo()) { memoizedIsInitialized = 0; @@ -15356,15 +15776,14 @@ public final class MasterProcedureProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, userInfo_); + output.writeMessage(1, getUserInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, snapshot_); + output.writeMessage(2, getSnapshot()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeMessage(3, tableSchema_); + output.writeMessage(3, getTableSchema()); } for (int i = 0; i < regionInfo_.size(); i++) { output.writeMessage(4, regionInfo_.get(i)); @@ -15372,26 +15791,25 @@ public final class MasterProcedureProtos { for (int i = 0; i < parentToChildRegionsPairList_.size(); i++) { output.writeMessage(5, parentToChildRegionsPairList_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, userInfo_); + .computeMessageSize(1, getUserInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, snapshot_); + .computeMessageSize(2, getSnapshot()); } if 
(((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, tableSchema_); + .computeMessageSize(3, getTableSchema()); } for (int i = 0; i < regionInfo_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -15401,19 +15819,13 @@ public final class MasterProcedureProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(5, parentToChildRegionsPairList_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -15443,12 +15855,10 @@ public final class MasterProcedureProtos { .equals(other.getRegionInfoList()); result = result && getParentToChildRegionsPairListList() .equals(other.getParentToChildRegionsPairListList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -15476,7 +15886,7 @@ public final class MasterProcedureProtos { hash = (37 * hash) + PARENT_TO_CHILD_REGIONS_PAIR_LIST_FIELD_NUMBER; hash = (53 * hash) + getParentToChildRegionsPairListList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -15504,46 +15914,57 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CloneSnapshotStateData parseFrom(java.io.InputStream input) throws java.io.IOException { - 
return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CloneSnapshotStateData parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CloneSnapshotStateData parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CloneSnapshotStateData parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CloneSnapshotStateData parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CloneSnapshotStateData parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, 
extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CloneSnapshotStateData prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -15551,14 +15972,15 @@ public final class MasterProcedureProtos { * Protobuf type {@code hbase.pb.CloneSnapshotStateData} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CloneSnapshotStateDataOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.CloneSnapshotStateData) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CloneSnapshotStateDataOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_CloneSnapshotStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_CloneSnapshotStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -15571,12 +15993,13 @@ public final class MasterProcedureProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getUserInfoFieldBuilder(); getSnapshotFieldBuilder(); getTableSchemaFieldBuilder(); @@ -15584,26 +16007,22 @@ public final class MasterProcedureProtos { getParentToChildRegionsPairListFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (userInfoBuilder_ == null) { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + userInfo_ = null; } else { userInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (snapshotBuilder_ == null) { - snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = null; } else { snapshotBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); if (tableSchemaBuilder_ == null) { - tableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + tableSchema_ = null; } else { tableSchemaBuilder_.clear(); } @@ -15623,10 +16042,6 @@ public final class MasterProcedureProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_CloneSnapshotStateData_descriptor; @@ -15695,6 +16110,32 @@ public final class MasterProcedureProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CloneSnapshotStateData) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CloneSnapshotStateData)other); @@ -15734,7 +16175,7 @@ public final class MasterProcedureProtos { regionInfo_ = other.regionInfo_; bitField0_ = (bitField0_ & ~0x00000008); regionInfoBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getRegionInfoFieldBuilder() : null; } else { regionInfoBuilder_.addAllMessages(other.regionInfo_); @@ -15760,51 +16201,44 @@ public final class MasterProcedureProtos { parentToChildRegionsPairList_ = other.parentToChildRegionsPairList_; bitField0_ = (bitField0_ & ~0x00000010); parentToChildRegionsPairListBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getParentToChildRegionsPairListFieldBuilder() : null; } else { parentToChildRegionsPairListBuilder_.addAllMessages(other.parentToChildRegionsPairList_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasUserInfo()) { - return false; } if (!hasSnapshot()) { - return false; } if (!hasTableSchema()) { - return false; } if (!getUserInfo().isInitialized()) { - return false; } if (!getSnapshot().isInitialized()) { - return false; } if (!getTableSchema().isInitialized()) { - return false; } for (int i = 0; i < getRegionInfoCount(); i++) { if (!getRegionInfo(i).isInitialized()) { - return false; } } for (int i = 0; i < getParentToChildRegionsPairListCount(); i++) { if (!getParentToChildRegionsPairList(i).isInitialized()) { - return false; } } @@ -15820,7 +16254,7 @@ public final class MasterProcedureProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CloneSnapshotStateData) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -15830,9 +16264,8 @@ public final class MasterProcedureProtos { } private int bitField0_; - // required .hbase.pb.UserInformation user_info = 1; - private 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> userInfoBuilder_; /** * required .hbase.pb.UserInformation user_info = 1; @@ -15845,7 +16278,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() { if (userInfoBuilder_ == null) { - return userInfo_; + return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } else { return userInfoBuilder_.getMessage(); } @@ -15886,6 +16319,7 @@ public final class MasterProcedureProtos { public Builder mergeUserInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation value) { if (userInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + userInfo_ != null && userInfo_ != org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance()) { userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.newBuilder(userInfo_).mergeFrom(value).buildPartial(); @@ -15904,7 +16338,7 @@ public final class MasterProcedureProtos { */ public Builder clearUserInfo() { if (userInfoBuilder_ == null) { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + userInfo_ = null; onChanged(); } else { userInfoBuilder_.clear(); @@ -15927,19 
+16361,20 @@ public final class MasterProcedureProtos { if (userInfoBuilder_ != null) { return userInfoBuilder_.getMessageOrBuilder(); } else { - return userInfo_; + return userInfo_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } } /** * required .hbase.pb.UserInformation user_info = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> getUserInfoFieldBuilder() { if (userInfoBuilder_ == null) { - userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder>( - userInfo_, + getUserInfo(), getParentForChildren(), isClean()); userInfo_ = null; @@ -15947,9 +16382,8 @@ public final class MasterProcedureProtos { return userInfoBuilder_; } - // required .hbase.pb.SnapshotDescription snapshot = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; /** * required .hbase.pb.SnapshotDescription snapshot = 2; @@ -15962,7 +16396,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { if (snapshotBuilder_ == null) { - return snapshot_; + return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } else { return snapshotBuilder_.getMessage(); } @@ -16003,6 +16437,7 @@ public final class MasterProcedureProtos { public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + snapshot_ != null && snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) { snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); @@ -16021,7 +16456,7 @@ public final class MasterProcedureProtos { */ public Builder clearSnapshot() { if (snapshotBuilder_ == null) { - snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = null; onChanged(); } else { snapshotBuilder_.clear(); @@ -16044,19 +16479,20 @@ public final class MasterProcedureProtos { if (snapshotBuilder_ != null) { return snapshotBuilder_.getMessageOrBuilder(); } else { - return snapshot_; + return snapshot_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } } /** * required .hbase.pb.SnapshotDescription snapshot = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> getSnapshotFieldBuilder() { if (snapshotBuilder_ == null) { - snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilder< + snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>( - snapshot_, + getSnapshot(), getParentForChildren(), isClean()); snapshot_ = null; @@ -16064,9 +16500,8 @@ public final class MasterProcedureProtos { return snapshotBuilder_; } - // required .hbase.pb.TableSchema table_schema = 3; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema tableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema tableSchema_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_; /** * required .hbase.pb.TableSchema table_schema = 3; @@ -16079,7 +16514,7 @@ public final 
class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { if (tableSchemaBuilder_ == null) { - return tableSchema_; + return tableSchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : tableSchema_; } else { return tableSchemaBuilder_.getMessage(); } @@ -16120,6 +16555,7 @@ public final class MasterProcedureProtos { public Builder mergeTableSchema(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema value) { if (tableSchemaBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004) && + tableSchema_ != null && tableSchema_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) { tableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.newBuilder(tableSchema_).mergeFrom(value).buildPartial(); @@ -16138,7 +16574,7 @@ public final class MasterProcedureProtos { */ public Builder clearTableSchema() { if (tableSchemaBuilder_ == null) { - tableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + tableSchema_ = null; onChanged(); } else { tableSchemaBuilder_.clear(); @@ -16161,19 +16597,20 @@ public final class MasterProcedureProtos { if (tableSchemaBuilder_ != null) { return tableSchemaBuilder_.getMessageOrBuilder(); } else { - return tableSchema_; + return tableSchema_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : tableSchema_; } } /** * required .hbase.pb.TableSchema table_schema = 3; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> getTableSchemaFieldBuilder() { if (tableSchemaBuilder_ == null) { - tableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>( - tableSchema_, + getTableSchema(), getParentForChildren(), isClean()); tableSchema_ = null; @@ -16181,7 +16618,6 @@ public final class MasterProcedureProtos { return tableSchemaBuilder_; } - // repeated .hbase.pb.RegionInfo region_info = 4; private java.util.List regionInfo_ = java.util.Collections.emptyList(); private void ensureRegionInfoIsMutable() { @@ -16191,7 +16627,7 @@ public final class MasterProcedureProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_; /** @@ -16323,7 +16759,8 @@ public final class MasterProcedureProtos { java.lang.Iterable values) { if (regionInfoBuilder_ == null) { ensureRegionInfoIsMutable(); - super.addAll(values, regionInfo_); + 
com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, regionInfo_); onChanged(); } else { regionInfoBuilder_.addAllMessages(values); @@ -16406,11 +16843,11 @@ public final class MasterProcedureProtos { getRegionInfoBuilderList() { return getRegionInfoFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> getRegionInfoFieldBuilder() { if (regionInfoBuilder_ == null) { - regionInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + regionInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( regionInfo_, ((bitField0_ & 0x00000008) == 0x00000008), @@ -16421,7 +16858,6 @@ public final class MasterProcedureProtos { return regionInfoBuilder_; } - // repeated .hbase.pb.RestoreParentToChildRegionsPair parent_to_child_regions_pair_list = 5; private java.util.List parentToChildRegionsPairList_ = java.util.Collections.emptyList(); private void ensureParentToChildRegionsPairListIsMutable() { @@ -16431,7 +16867,7 @@ public final class MasterProcedureProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPairOrBuilder> 
parentToChildRegionsPairListBuilder_; /** @@ -16563,7 +16999,8 @@ public final class MasterProcedureProtos { java.lang.Iterable values) { if (parentToChildRegionsPairListBuilder_ == null) { ensureParentToChildRegionsPairListIsMutable(); - super.addAll(values, parentToChildRegionsPairList_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, parentToChildRegionsPairList_); onChanged(); } else { parentToChildRegionsPairListBuilder_.addAllMessages(values); @@ -16646,11 +17083,11 @@ public final class MasterProcedureProtos { getParentToChildRegionsPairListBuilderList() { return getParentToChildRegionsPairListFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPairOrBuilder> getParentToChildRegionsPairListFieldBuilder() { if (parentToChildRegionsPairListBuilder_ == null) { - parentToChildRegionsPairListBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + parentToChildRegionsPairListBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPairOrBuilder>( parentToChildRegionsPairList_, ((bitField0_ & 0x00000010) == 0x00000010), @@ -16660,22 +17097,59 @@ public final class MasterProcedureProtos { } return parentToChildRegionsPairListBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet 
unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.CloneSnapshotStateData) } + // @@protoc_insertion_point(class_scope:hbase.pb.CloneSnapshotStateData) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CloneSnapshotStateData DEFAULT_INSTANCE; static { - defaultInstance = new CloneSnapshotStateData(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CloneSnapshotStateData(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CloneSnapshotStateData getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public CloneSnapshotStateData parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CloneSnapshotStateData(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CloneSnapshotStateData getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.CloneSnapshotStateData) } - public interface RestoreSnapshotStateDataOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RestoreSnapshotStateDataOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RestoreSnapshotStateData) + 
com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.UserInformation user_info = 1; /** * required .hbase.pb.UserInformation user_info = 1; */ @@ -16689,7 +17163,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder(); - // required .hbase.pb.SnapshotDescription snapshot = 2; /** * required .hbase.pb.SnapshotDescription snapshot = 2; */ @@ -16703,7 +17176,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder(); - // required .hbase.pb.TableSchema modified_table_schema = 3; /** * required .hbase.pb.TableSchema modified_table_schema = 3; */ @@ -16717,7 +17189,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getModifiedTableSchemaOrBuilder(); - // repeated .hbase.pb.RegionInfo region_info_for_restore = 4; /** * repeated .hbase.pb.RegionInfo region_info_for_restore = 4; */ @@ -16742,7 +17213,6 @@ public final class MasterProcedureProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoForRestoreOrBuilder( int index); - // repeated .hbase.pb.RegionInfo region_info_for_remove = 5; /** * repeated .hbase.pb.RegionInfo region_info_for_remove = 5; */ @@ -16767,7 +17237,6 @@ public final class MasterProcedureProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoForRemoveOrBuilder( int index); - // repeated .hbase.pb.RegionInfo region_info_for_add = 6; /** * repeated .hbase.pb.RegionInfo region_info_for_add = 6; */ @@ -16792,7 +17261,6 @@ public final class MasterProcedureProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoForAddOrBuilder( int index); - // repeated .hbase.pb.RestoreParentToChildRegionsPair 
parent_to_child_regions_pair_list = 7; /** * repeated .hbase.pb.RestoreParentToChildRegionsPair parent_to_child_regions_pair_list = 7; */ @@ -16820,36 +17288,31 @@ public final class MasterProcedureProtos { /** * Protobuf type {@code hbase.pb.RestoreSnapshotStateData} */ - public static final class RestoreSnapshotStateData extends - com.google.protobuf.GeneratedMessage - implements RestoreSnapshotStateDataOrBuilder { + public static final class RestoreSnapshotStateData extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RestoreSnapshotStateData) + RestoreSnapshotStateDataOrBuilder { // Use RestoreSnapshotStateData.newBuilder() to construct. - private RestoreSnapshotStateData(com.google.protobuf.GeneratedMessage.Builder builder) { + private RestoreSnapshotStateData(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private RestoreSnapshotStateData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RestoreSnapshotStateData defaultInstance; - public static RestoreSnapshotStateData getDefaultInstance() { - return defaultInstance; - } - - public RestoreSnapshotStateData getDefaultInstanceForType() { - return defaultInstance; + private RestoreSnapshotStateData() { + regionInfoForRestore_ = java.util.Collections.emptyList(); + regionInfoForRemove_ = java.util.Collections.emptyList(); + regionInfoForAdd_ = java.util.Collections.emptyList(); + parentToChildRegionsPairList_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RestoreSnapshotStateData( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -16912,7 +17375,8 @@ public final class MasterProcedureProtos { regionInfoForRestore_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000008; } - regionInfoForRestore_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry)); + regionInfoForRestore_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry)); break; } case 42: { @@ -16920,7 +17384,8 @@ public final class MasterProcedureProtos { regionInfoForRemove_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000010; } - regionInfoForRemove_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry)); + regionInfoForRemove_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry)); break; } case 50: { @@ -16928,7 +17393,8 @@ public final class MasterProcedureProtos { regionInfoForAdd_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000020; } - regionInfoForAdd_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry)); + regionInfoForAdd_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry)); break; } case 58: { @@ -16936,7 +17402,8 @@ public final class MasterProcedureProtos { parentToChildRegionsPairList_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000040; } - parentToChildRegionsPairList_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair.PARSER, extensionRegistry)); + 
parentToChildRegionsPairList_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair.PARSER, extensionRegistry)); break; } } @@ -16945,7 +17412,7 @@ public final class MasterProcedureProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { regionInfoForRestore_ = java.util.Collections.unmodifiableList(regionInfoForRestore_); @@ -16968,30 +17435,14 @@ public final class MasterProcedureProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_RestoreSnapshotStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_RestoreSnapshotStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreSnapshotStateData.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreSnapshotStateData.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RestoreSnapshotStateData parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RestoreSnapshotStateData(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.UserInformation user_info = 1; 
public static final int USER_INFO_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_; /** @@ -17004,16 +17455,15 @@ public final class MasterProcedureProtos { * required .hbase.pb.UserInformation user_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() { - return userInfo_; + return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } /** * required .hbase.pb.UserInformation user_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() { - return userInfo_; + return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } - // required .hbase.pb.SnapshotDescription snapshot = 2; public static final int SNAPSHOT_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_; /** @@ -17026,16 +17476,15 @@ public final class MasterProcedureProtos { * required .hbase.pb.SnapshotDescription snapshot = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { - return snapshot_; + return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } /** * required .hbase.pb.SnapshotDescription snapshot = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { - return snapshot_; + return snapshot_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } - // required .hbase.pb.TableSchema modified_table_schema = 3; public static final int MODIFIED_TABLE_SCHEMA_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema modifiedTableSchema_; /** @@ -17048,16 +17497,15 @@ public final class MasterProcedureProtos { * required .hbase.pb.TableSchema modified_table_schema = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getModifiedTableSchema() { - return modifiedTableSchema_; + return modifiedTableSchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : modifiedTableSchema_; } /** * required .hbase.pb.TableSchema modified_table_schema = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getModifiedTableSchemaOrBuilder() { - return modifiedTableSchema_; + return modifiedTableSchema_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : modifiedTableSchema_; } - // repeated .hbase.pb.RegionInfo region_info_for_restore = 4; public static final int REGION_INFO_FOR_RESTORE_FIELD_NUMBER = 4; private java.util.List regionInfoForRestore_; /** @@ -17093,7 +17541,6 @@ public final class MasterProcedureProtos { return regionInfoForRestore_.get(index); } - // repeated .hbase.pb.RegionInfo region_info_for_remove = 5; public static final int REGION_INFO_FOR_REMOVE_FIELD_NUMBER = 5; private java.util.List regionInfoForRemove_; /** @@ -17129,7 +17576,6 @@ public final class MasterProcedureProtos { return regionInfoForRemove_.get(index); } - // repeated .hbase.pb.RegionInfo region_info_for_add = 6; public static final int REGION_INFO_FOR_ADD_FIELD_NUMBER = 6; private java.util.List regionInfoForAdd_; /** @@ -17165,7 +17611,6 @@ public final class MasterProcedureProtos { return regionInfoForAdd_.get(index); } - // repeated .hbase.pb.RestoreParentToChildRegionsPair parent_to_child_regions_pair_list = 7; public static final int PARENT_TO_CHILD_REGIONS_PAIR_LIST_FIELD_NUMBER = 7; private java.util.List parentToChildRegionsPairList_; /** @@ -17201,19 +17646,11 @@ public final class MasterProcedureProtos { return parentToChildRegionsPairList_.get(index); } - private void initFields() { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); - modifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - regionInfoForRestore_ = java.util.Collections.emptyList(); - regionInfoForRemove_ = java.util.Collections.emptyList(); - regionInfoForAdd_ = java.util.Collections.emptyList(); - parentToChildRegionsPairList_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public 
final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasUserInfo()) { memoizedIsInitialized = 0; @@ -17269,15 +17706,14 @@ public final class MasterProcedureProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, userInfo_); + output.writeMessage(1, getUserInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, snapshot_); + output.writeMessage(2, getSnapshot()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeMessage(3, modifiedTableSchema_); + output.writeMessage(3, getModifiedTableSchema()); } for (int i = 0; i < regionInfoForRestore_.size(); i++) { output.writeMessage(4, regionInfoForRestore_.get(i)); @@ -17291,26 +17727,25 @@ public final class MasterProcedureProtos { for (int i = 0; i < parentToChildRegionsPairList_.size(); i++) { output.writeMessage(7, parentToChildRegionsPairList_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, userInfo_); + .computeMessageSize(1, getUserInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, snapshot_); + .computeMessageSize(2, getSnapshot()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, modifiedTableSchema_); + .computeMessageSize(3, getModifiedTableSchema()); } for (int i = 0; i < 
regionInfoForRestore_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -17328,19 +17763,13 @@ public final class MasterProcedureProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(7, parentToChildRegionsPairList_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -17374,12 +17803,10 @@ public final class MasterProcedureProtos { .equals(other.getRegionInfoForAddList()); result = result && getParentToChildRegionsPairListList() .equals(other.getParentToChildRegionsPairListList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -17415,7 +17842,7 @@ public final class MasterProcedureProtos { hash = (37 * hash) + PARENT_TO_CHILD_REGIONS_PAIR_LIST_FIELD_NUMBER; hash = (53 * hash) + getParentToChildRegionsPairListList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -17443,46 +17870,57 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreSnapshotStateData parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreSnapshotStateData parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreSnapshotStateData parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreSnapshotStateData parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreSnapshotStateData parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreSnapshotStateData parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return 
newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreSnapshotStateData prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -17490,14 +17928,15 @@ public final class MasterProcedureProtos { * Protobuf type {@code hbase.pb.RestoreSnapshotStateData} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreSnapshotStateDataOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RestoreSnapshotStateData) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreSnapshotStateDataOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_RestoreSnapshotStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_RestoreSnapshotStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -17510,12 +17949,13 @@ public final class 
MasterProcedureProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getUserInfoFieldBuilder(); getSnapshotFieldBuilder(); getModifiedTableSchemaFieldBuilder(); @@ -17525,26 +17965,22 @@ public final class MasterProcedureProtos { getParentToChildRegionsPairListFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (userInfoBuilder_ == null) { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + userInfo_ = null; } else { userInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (snapshotBuilder_ == null) { - snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = null; } else { snapshotBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); if (modifiedTableSchemaBuilder_ == null) { - modifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + modifiedTableSchema_ = null; } else { modifiedTableSchemaBuilder_.clear(); } @@ -17576,10 +18012,6 @@ public final class MasterProcedureProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_RestoreSnapshotStateData_descriptor; @@ -17666,6 +18098,32 @@ public final class MasterProcedureProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + 
public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreSnapshotStateData) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreSnapshotStateData)other); @@ -17705,7 +18163,7 @@ public final class MasterProcedureProtos { regionInfoForRestore_ = other.regionInfoForRestore_; bitField0_ = (bitField0_ & ~0x00000008); regionInfoForRestoreBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getRegionInfoForRestoreFieldBuilder() : null; } else { regionInfoForRestoreBuilder_.addAllMessages(other.regionInfoForRestore_); @@ -17731,7 +18189,7 @@ public final class MasterProcedureProtos { regionInfoForRemove_ = other.regionInfoForRemove_; bitField0_ = (bitField0_ & ~0x00000010); regionInfoForRemoveBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getRegionInfoForRemoveFieldBuilder() : null; } else { regionInfoForRemoveBuilder_.addAllMessages(other.regionInfoForRemove_); @@ -17757,7 +18215,7 @@ public final class MasterProcedureProtos { regionInfoForAdd_ = other.regionInfoForAdd_; bitField0_ = (bitField0_ & ~0x00000020); regionInfoForAddBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getRegionInfoForAddFieldBuilder() : null; } else { regionInfoForAddBuilder_.addAllMessages(other.regionInfoForAdd_); @@ -17783,63 +18241,54 @@ public final class MasterProcedureProtos { parentToChildRegionsPairList_ = other.parentToChildRegionsPairList_; bitField0_ = (bitField0_ & ~0x00000040); parentToChildRegionsPairListBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getParentToChildRegionsPairListFieldBuilder() : null; } else { parentToChildRegionsPairListBuilder_.addAllMessages(other.parentToChildRegionsPairList_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasUserInfo()) { - return false; } if (!hasSnapshot()) { - return false; } if (!hasModifiedTableSchema()) { - return false; } if (!getUserInfo().isInitialized()) { - return false; } if (!getSnapshot().isInitialized()) { - return false; } if (!getModifiedTableSchema().isInitialized()) { - return false; } for (int i = 0; i < getRegionInfoForRestoreCount(); i++) { if (!getRegionInfoForRestore(i).isInitialized()) { - return false; } } for (int i = 0; i < getRegionInfoForRemoveCount(); i++) { if (!getRegionInfoForRemove(i).isInitialized()) { - return false; } } for (int i = 0; i < getRegionInfoForAddCount(); i++) { if (!getRegionInfoForAdd(i).isInitialized()) { - return false; } } for (int i = 0; i < getParentToChildRegionsPairListCount(); i++) { if 
(!getParentToChildRegionsPairList(i).isInitialized()) { - return false; } } @@ -17855,7 +18304,7 @@ public final class MasterProcedureProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreSnapshotStateData) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -17865,9 +18314,8 @@ public final class MasterProcedureProtos { } private int bitField0_; - // required .hbase.pb.UserInformation user_info = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> userInfoBuilder_; /** * required .hbase.pb.UserInformation user_info = 1; @@ -17880,7 +18328,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() { if (userInfoBuilder_ == null) { - return userInfo_; + return userInfo_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } else { return userInfoBuilder_.getMessage(); } @@ -17921,6 +18369,7 @@ public final class MasterProcedureProtos { public Builder mergeUserInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation value) { if (userInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + userInfo_ != null && userInfo_ != org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance()) { userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.newBuilder(userInfo_).mergeFrom(value).buildPartial(); @@ -17939,7 +18388,7 @@ public final class MasterProcedureProtos { */ public Builder clearUserInfo() { if (userInfoBuilder_ == null) { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + userInfo_ = null; onChanged(); } else { userInfoBuilder_.clear(); @@ -17962,19 +18411,20 @@ public final class MasterProcedureProtos { if (userInfoBuilder_ != null) { return userInfoBuilder_.getMessageOrBuilder(); } else { - return userInfo_; + return userInfo_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } } /** * required .hbase.pb.UserInformation user_info = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> getUserInfoFieldBuilder() { if (userInfoBuilder_ == null) { - userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder>( - userInfo_, + getUserInfo(), getParentForChildren(), isClean()); userInfo_ = null; @@ -17982,9 +18432,8 @@ public final class MasterProcedureProtos { return userInfoBuilder_; } - // required .hbase.pb.SnapshotDescription snapshot = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; /** * required .hbase.pb.SnapshotDescription snapshot = 2; @@ -17997,7 +18446,7 @@ public 
final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { if (snapshotBuilder_ == null) { - return snapshot_; + return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } else { return snapshotBuilder_.getMessage(); } @@ -18038,6 +18487,7 @@ public final class MasterProcedureProtos { public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + snapshot_ != null && snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) { snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); @@ -18056,7 +18506,7 @@ public final class MasterProcedureProtos { */ public Builder clearSnapshot() { if (snapshotBuilder_ == null) { - snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = null; onChanged(); } else { snapshotBuilder_.clear(); @@ -18079,19 +18529,20 @@ public final class MasterProcedureProtos { if (snapshotBuilder_ != null) { return snapshotBuilder_.getMessageOrBuilder(); } else { - return snapshot_; + return snapshot_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } } /** * required .hbase.pb.SnapshotDescription snapshot = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> getSnapshotFieldBuilder() { if (snapshotBuilder_ == null) { - snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilder< + snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>( - snapshot_, + getSnapshot(), getParentForChildren(), isClean()); snapshot_ = null; @@ -18099,9 +18550,8 @@ public final class MasterProcedureProtos { return snapshotBuilder_; } - // required .hbase.pb.TableSchema modified_table_schema = 3; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema modifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema modifiedTableSchema_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> modifiedTableSchemaBuilder_; /** * required .hbase.pb.TableSchema modified_table_schema 
= 3; @@ -18114,7 +18564,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getModifiedTableSchema() { if (modifiedTableSchemaBuilder_ == null) { - return modifiedTableSchema_; + return modifiedTableSchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : modifiedTableSchema_; } else { return modifiedTableSchemaBuilder_.getMessage(); } @@ -18155,6 +18605,7 @@ public final class MasterProcedureProtos { public Builder mergeModifiedTableSchema(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema value) { if (modifiedTableSchemaBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004) && + modifiedTableSchema_ != null && modifiedTableSchema_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) { modifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.newBuilder(modifiedTableSchema_).mergeFrom(value).buildPartial(); @@ -18173,7 +18624,7 @@ public final class MasterProcedureProtos { */ public Builder clearModifiedTableSchema() { if (modifiedTableSchemaBuilder_ == null) { - modifiedTableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + modifiedTableSchema_ = null; onChanged(); } else { modifiedTableSchemaBuilder_.clear(); @@ -18196,19 +18647,20 @@ public final class MasterProcedureProtos { if (modifiedTableSchemaBuilder_ != null) { return modifiedTableSchemaBuilder_.getMessageOrBuilder(); } else { - return modifiedTableSchema_; + return modifiedTableSchema_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : modifiedTableSchema_; } } /** * required .hbase.pb.TableSchema modified_table_schema = 3; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> getModifiedTableSchemaFieldBuilder() { if (modifiedTableSchemaBuilder_ == null) { - modifiedTableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder< + modifiedTableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>( - modifiedTableSchema_, + getModifiedTableSchema(), getParentForChildren(), isClean()); modifiedTableSchema_ = null; @@ -18216,7 +18668,6 @@ public final class MasterProcedureProtos { return modifiedTableSchemaBuilder_; } - // repeated .hbase.pb.RegionInfo region_info_for_restore = 4; private java.util.List regionInfoForRestore_ = java.util.Collections.emptyList(); private void ensureRegionInfoForRestoreIsMutable() { @@ -18226,7 +18677,7 @@ public final class MasterProcedureProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoForRestoreBuilder_; /** @@ -18358,7 +18809,8 @@ public final class MasterProcedureProtos { java.lang.Iterable values) { if 
(regionInfoForRestoreBuilder_ == null) { ensureRegionInfoForRestoreIsMutable(); - super.addAll(values, regionInfoForRestore_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, regionInfoForRestore_); onChanged(); } else { regionInfoForRestoreBuilder_.addAllMessages(values); @@ -18441,11 +18893,11 @@ public final class MasterProcedureProtos { getRegionInfoForRestoreBuilderList() { return getRegionInfoForRestoreFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> getRegionInfoForRestoreFieldBuilder() { if (regionInfoForRestoreBuilder_ == null) { - regionInfoForRestoreBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + regionInfoForRestoreBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( regionInfoForRestore_, ((bitField0_ & 0x00000008) == 0x00000008), @@ -18456,7 +18908,6 @@ public final class MasterProcedureProtos { return regionInfoForRestoreBuilder_; } - // repeated .hbase.pb.RegionInfo region_info_for_remove = 5; private java.util.List regionInfoForRemove_ = java.util.Collections.emptyList(); private void ensureRegionInfoForRemoveIsMutable() { @@ -18466,7 +18917,7 @@ public final class MasterProcedureProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoForRemoveBuilder_; /** @@ -18598,7 +19049,8 @@ public final class MasterProcedureProtos { java.lang.Iterable values) { if (regionInfoForRemoveBuilder_ == null) { ensureRegionInfoForRemoveIsMutable(); - super.addAll(values, regionInfoForRemove_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, regionInfoForRemove_); onChanged(); } else { regionInfoForRemoveBuilder_.addAllMessages(values); @@ -18681,11 +19133,11 @@ public final class MasterProcedureProtos { getRegionInfoForRemoveBuilderList() { return getRegionInfoForRemoveFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> getRegionInfoForRemoveFieldBuilder() { if (regionInfoForRemoveBuilder_ == null) { - regionInfoForRemoveBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + regionInfoForRemoveBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( regionInfoForRemove_, ((bitField0_ & 0x00000010) == 0x00000010), @@ -18696,7 +19148,6 @@ public final class MasterProcedureProtos { return regionInfoForRemoveBuilder_; } - // repeated .hbase.pb.RegionInfo region_info_for_add = 6; private java.util.List regionInfoForAdd_ = java.util.Collections.emptyList(); private void ensureRegionInfoForAddIsMutable() { @@ -18706,7 +19157,7 @@ public final class MasterProcedureProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private 
com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoForAddBuilder_; /** @@ -18838,7 +19289,8 @@ public final class MasterProcedureProtos { java.lang.Iterable values) { if (regionInfoForAddBuilder_ == null) { ensureRegionInfoForAddIsMutable(); - super.addAll(values, regionInfoForAdd_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, regionInfoForAdd_); onChanged(); } else { regionInfoForAddBuilder_.addAllMessages(values); @@ -18921,11 +19373,11 @@ public final class MasterProcedureProtos { getRegionInfoForAddBuilderList() { return getRegionInfoForAddFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> getRegionInfoForAddFieldBuilder() { if (regionInfoForAddBuilder_ == null) { - regionInfoForAddBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + regionInfoForAddBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( regionInfoForAdd_, ((bitField0_ & 0x00000020) == 0x00000020), @@ -18936,7 +19388,6 @@ public final class MasterProcedureProtos { return regionInfoForAddBuilder_; } - // repeated .hbase.pb.RestoreParentToChildRegionsPair parent_to_child_regions_pair_list = 7; private java.util.List parentToChildRegionsPairList_ = 
java.util.Collections.emptyList(); private void ensureParentToChildRegionsPairListIsMutable() { @@ -18946,7 +19397,7 @@ public final class MasterProcedureProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPairOrBuilder> parentToChildRegionsPairListBuilder_; /** @@ -19078,7 +19529,8 @@ public final class MasterProcedureProtos { java.lang.Iterable values) { if (parentToChildRegionsPairListBuilder_ == null) { ensureParentToChildRegionsPairListIsMutable(); - super.addAll(values, parentToChildRegionsPairList_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, parentToChildRegionsPairList_); onChanged(); } else { parentToChildRegionsPairListBuilder_.addAllMessages(values); @@ -19161,11 +19613,11 @@ public final class MasterProcedureProtos { getParentToChildRegionsPairListBuilderList() { return getParentToChildRegionsPairListFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPairOrBuilder> getParentToChildRegionsPairListFieldBuilder() { if (parentToChildRegionsPairListBuilder_ == null) { - parentToChildRegionsPairListBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + parentToChildRegionsPairListBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPairOrBuilder>( parentToChildRegionsPairList_, ((bitField0_ & 0x00000040) == 0x00000040), @@ -19175,22 +19627,59 @@ public final class MasterProcedureProtos { } return parentToChildRegionsPairListBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RestoreSnapshotStateData) } + // @@protoc_insertion_point(class_scope:hbase.pb.RestoreSnapshotStateData) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreSnapshotStateData DEFAULT_INSTANCE; static { - defaultInstance = new RestoreSnapshotStateData(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreSnapshotStateData(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreSnapshotStateData getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RestoreSnapshotStateData parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RestoreSnapshotStateData(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser 
parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreSnapshotStateData getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.RestoreSnapshotStateData) } - public interface DispatchMergingRegionsStateDataOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface DispatchMergingRegionsStateDataOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.DispatchMergingRegionsStateData) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.UserInformation user_info = 1; /** * required .hbase.pb.UserInformation user_info = 1; */ @@ -19204,7 +19693,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder(); - // required .hbase.pb.TableName table_name = 2; /** * required .hbase.pb.TableName table_name = 2; */ @@ -19218,7 +19706,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); - // repeated .hbase.pb.RegionInfo region_info = 3; /** * repeated .hbase.pb.RegionInfo region_info = 3; */ @@ -19243,7 +19730,6 @@ public final class MasterProcedureProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder( int index); - // optional bool forcible = 4; /** * optional bool forcible = 4; */ @@ -19256,36 +19742,29 @@ public final class MasterProcedureProtos { /** * Protobuf type {@code hbase.pb.DispatchMergingRegionsStateData} */ - public static final class DispatchMergingRegionsStateData extends - com.google.protobuf.GeneratedMessage - implements DispatchMergingRegionsStateDataOrBuilder { + public static final class DispatchMergingRegionsStateData extends + 
com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.DispatchMergingRegionsStateData) + DispatchMergingRegionsStateDataOrBuilder { // Use DispatchMergingRegionsStateData.newBuilder() to construct. - private DispatchMergingRegionsStateData(com.google.protobuf.GeneratedMessage.Builder builder) { + private DispatchMergingRegionsStateData(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private DispatchMergingRegionsStateData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final DispatchMergingRegionsStateData defaultInstance; - public static DispatchMergingRegionsStateData getDefaultInstance() { - return defaultInstance; } - - public DispatchMergingRegionsStateData getDefaultInstanceForType() { - return defaultInstance; + private DispatchMergingRegionsStateData() { + regionInfo_ = java.util.Collections.emptyList(); + forcible_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private DispatchMergingRegionsStateData( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -19335,7 +19814,8 @@ public final class MasterProcedureProtos { regionInfo_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000004; } - regionInfo_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry)); + regionInfo_.add( + 
input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry)); break; } case 32: { @@ -19349,7 +19829,7 @@ public final class MasterProcedureProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_); @@ -19363,30 +19843,14 @@ public final class MasterProcedureProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_DispatchMergingRegionsStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_DispatchMergingRegionsStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DispatchMergingRegionsStateData.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DispatchMergingRegionsStateData.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public DispatchMergingRegionsStateData parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new DispatchMergingRegionsStateData(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.UserInformation user_info = 1; public static final int 
USER_INFO_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_; /** @@ -19399,16 +19863,15 @@ public final class MasterProcedureProtos { * required .hbase.pb.UserInformation user_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() { - return userInfo_; + return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } /** * required .hbase.pb.UserInformation user_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() { - return userInfo_; + return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } - // required .hbase.pb.TableName table_name = 2; public static final int TABLE_NAME_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** @@ -19421,16 +19884,15 @@ public final class MasterProcedureProtos { * required .hbase.pb.TableName table_name = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * required .hbase.pb.TableName table_name = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { - return tableName_; + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } - // repeated .hbase.pb.RegionInfo region_info = 3; public static final int REGION_INFO_FIELD_NUMBER = 3; private java.util.List regionInfo_; /** @@ -19466,7 +19928,6 @@ public final class MasterProcedureProtos { return regionInfo_.get(index); } - // optional bool forcible = 4; public static final int FORCIBLE_FIELD_NUMBER = 4; private boolean forcible_; /** @@ -19482,16 +19943,11 @@ public final class MasterProcedureProtos { return forcible_; } - private void initFields() { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - regionInfo_ = java.util.Collections.emptyList(); - forcible_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasUserInfo()) { memoizedIsInitialized = 0; @@ -19521,12 +19977,11 @@ public final class MasterProcedureProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, userInfo_); + output.writeMessage(1, getUserInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, tableName_); + output.writeMessage(2, getTableName()); } for (int i = 0; i < regionInfo_.size(); i++) { output.writeMessage(3, regionInfo_.get(i)); @@ -19534,22 +19989,21 @@ public final class MasterProcedureProtos { if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBool(4, forcible_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; 
public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, userInfo_); + .computeMessageSize(1, getUserInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, tableName_); + .computeMessageSize(2, getTableName()); } for (int i = 0; i < regionInfo_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -19559,19 +20013,13 @@ public final class MasterProcedureProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(4, forcible_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -19599,12 +20047,10 @@ public final class MasterProcedureProtos { result = result && (getForcible() == other.getForcible()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -19626,9 +20072,10 @@ public final class MasterProcedureProtos { } if (hasForcible()) { hash = (37 * hash) + FORCIBLE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getForcible()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getForcible()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return 
hash; } @@ -19656,46 +20103,57 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DispatchMergingRegionsStateData parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DispatchMergingRegionsStateData parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DispatchMergingRegionsStateData parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DispatchMergingRegionsStateData parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DispatchMergingRegionsStateData parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DispatchMergingRegionsStateData parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DispatchMergingRegionsStateData prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -19703,14 +20161,15 @@ public final class MasterProcedureProtos { * Protobuf type {@code hbase.pb.DispatchMergingRegionsStateData} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DispatchMergingRegionsStateDataOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.DispatchMergingRegionsStateData) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DispatchMergingRegionsStateDataOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { 
return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_DispatchMergingRegionsStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_DispatchMergingRegionsStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -19723,31 +20182,28 @@ public final class MasterProcedureProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getUserInfoFieldBuilder(); getTableNameFieldBuilder(); getRegionInfoFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (userInfoBuilder_ == null) { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + userInfo_ = null; } else { userInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; } else { tableNameBuilder_.clear(); } @@ -19763,10 +20219,6 @@ public final class MasterProcedureProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_DispatchMergingRegionsStateData_descriptor; @@ 
-19822,6 +20274,32 @@ public final class MasterProcedureProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DispatchMergingRegionsStateData) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DispatchMergingRegionsStateData)other); @@ -19858,7 +20336,7 @@ public final class MasterProcedureProtos { regionInfo_ = other.regionInfo_; bitField0_ = (bitField0_ & ~0x00000004); regionInfoBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getRegionInfoFieldBuilder() : null; } else { regionInfoBuilder_.addAllMessages(other.regionInfo_); @@ -19868,30 +20346,26 @@ public final class MasterProcedureProtos { if (other.hasForcible()) { setForcible(other.getForcible()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasUserInfo()) { - return false; } if (!hasTableName()) { - return false; } if (!getUserInfo().isInitialized()) { - return false; } if (!getTableName().isInitialized()) { - return false; } for (int i = 0; i < getRegionInfoCount(); i++) { if (!getRegionInfo(i).isInitialized()) { - return false; } } @@ -19907,7 +20381,7 @@ public final class MasterProcedureProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DispatchMergingRegionsStateData) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -19917,9 +20391,8 @@ public final class MasterProcedureProtos { } private int bitField0_; - // required .hbase.pb.UserInformation user_info = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> userInfoBuilder_; /** * required 
.hbase.pb.UserInformation user_info = 1; @@ -19932,7 +20405,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() { if (userInfoBuilder_ == null) { - return userInfo_; + return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } else { return userInfoBuilder_.getMessage(); } @@ -19973,6 +20446,7 @@ public final class MasterProcedureProtos { public Builder mergeUserInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation value) { if (userInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + userInfo_ != null && userInfo_ != org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance()) { userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.newBuilder(userInfo_).mergeFrom(value).buildPartial(); @@ -19991,7 +20465,7 @@ public final class MasterProcedureProtos { */ public Builder clearUserInfo() { if (userInfoBuilder_ == null) { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + userInfo_ = null; onChanged(); } else { userInfoBuilder_.clear(); @@ -20014,19 +20488,20 @@ public final class MasterProcedureProtos { if (userInfoBuilder_ != null) { return userInfoBuilder_.getMessageOrBuilder(); } else { - return userInfo_; + return userInfo_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } } /** * required .hbase.pb.UserInformation user_info = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> getUserInfoFieldBuilder() { if (userInfoBuilder_ == null) { - userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder>( - userInfo_, + getUserInfo(), getParentForChildren(), isClean()); userInfo_ = null; @@ -20034,9 +20509,8 @@ public final class MasterProcedureProtos { return userInfoBuilder_; } - // required .hbase.pb.TableName table_name = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * required .hbase.pb.TableName table_name = 2; @@ -20049,7 +20523,7 @@ public final class MasterProcedureProtos { */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } @@ -20090,6 +20564,7 @@ public final class MasterProcedureProtos { public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); @@ -20108,7 +20583,7 @@ public final class MasterProcedureProtos { */ public Builder clearTableName() { if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); @@ -20131,19 +20606,20 @@ public final class MasterProcedureProtos { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { - return tableName_; + return tableName_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * required .hbase.pb.TableName table_name = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { - tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( - tableName_, + getTableName(), getParentForChildren(), isClean()); tableName_ = null; @@ -20151,7 +20627,6 @@ public final class MasterProcedureProtos { return tableNameBuilder_; } - // repeated .hbase.pb.RegionInfo region_info = 3; private java.util.List regionInfo_ = java.util.Collections.emptyList(); private void ensureRegionInfoIsMutable() { @@ -20161,7 +20636,7 @@ public final class MasterProcedureProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_; /** @@ -20293,7 +20768,8 @@ public final class MasterProcedureProtos { java.lang.Iterable values) { if (regionInfoBuilder_ == null) { ensureRegionInfoIsMutable(); - super.addAll(values, regionInfo_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, 
regionInfo_); onChanged(); } else { regionInfoBuilder_.addAllMessages(values); @@ -20376,11 +20852,11 @@ public final class MasterProcedureProtos { getRegionInfoBuilderList() { return getRegionInfoFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> getRegionInfoFieldBuilder() { if (regionInfoBuilder_ == null) { - regionInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + regionInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( regionInfo_, ((bitField0_ & 0x00000004) == 0x00000004), @@ -20391,7 +20867,6 @@ public final class MasterProcedureProtos { return regionInfoBuilder_; } - // optional bool forcible = 4; private boolean forcible_ ; /** * optional bool forcible = 4; @@ -20423,22 +20898,59 @@ public final class MasterProcedureProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.DispatchMergingRegionsStateData) } + // @@protoc_insertion_point(class_scope:hbase.pb.DispatchMergingRegionsStateData) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DispatchMergingRegionsStateData DEFAULT_INSTANCE; 
static { - defaultInstance = new DispatchMergingRegionsStateData(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DispatchMergingRegionsStateData(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DispatchMergingRegionsStateData getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public DispatchMergingRegionsStateData parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DispatchMergingRegionsStateData(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DispatchMergingRegionsStateData getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.DispatchMergingRegionsStateData) } - public interface ServerCrashStateDataOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ServerCrashStateDataOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ServerCrashStateData) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.ServerName server_name = 1; /** * required .hbase.pb.ServerName server_name = 1; */ @@ -20452,7 +20964,6 @@ public final class MasterProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder(); - // optional bool distributed_log_replay = 2; /** * optional bool distributed_log_replay = 2; */ @@ -20462,7 +20973,6 @@ public final class 
MasterProcedureProtos { */ boolean getDistributedLogReplay(); - // repeated .hbase.pb.RegionInfo regions_on_crashed_server = 3; /** * repeated .hbase.pb.RegionInfo regions_on_crashed_server = 3; */ @@ -20487,7 +20997,6 @@ public final class MasterProcedureProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionsOnCrashedServerOrBuilder( int index); - // repeated .hbase.pb.RegionInfo regions_assigned = 4; /** * repeated .hbase.pb.RegionInfo regions_assigned = 4; */ @@ -20512,7 +21021,6 @@ public final class MasterProcedureProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionsAssignedOrBuilder( int index); - // optional bool carrying_meta = 5; /** * optional bool carrying_meta = 5; */ @@ -20522,7 +21030,6 @@ public final class MasterProcedureProtos { */ boolean getCarryingMeta(); - // optional bool should_split_wal = 6 [default = true]; /** * optional bool should_split_wal = 6 [default = true]; */ @@ -20535,36 +21042,32 @@ public final class MasterProcedureProtos { /** * Protobuf type {@code hbase.pb.ServerCrashStateData} */ - public static final class ServerCrashStateData extends - com.google.protobuf.GeneratedMessage - implements ServerCrashStateDataOrBuilder { + public static final class ServerCrashStateData extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ServerCrashStateData) + ServerCrashStateDataOrBuilder { // Use ServerCrashStateData.newBuilder() to construct. 
- private ServerCrashStateData(com.google.protobuf.GeneratedMessage.Builder builder) { + private ServerCrashStateData(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private ServerCrashStateData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ServerCrashStateData defaultInstance; - public static ServerCrashStateData getDefaultInstance() { - return defaultInstance; - } - - public ServerCrashStateData getDefaultInstanceForType() { - return defaultInstance; + private ServerCrashStateData() { + distributedLogReplay_ = false; + regionsOnCrashedServer_ = java.util.Collections.emptyList(); + regionsAssigned_ = java.util.Collections.emptyList(); + carryingMeta_ = false; + shouldSplitWal_ = true; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ServerCrashStateData( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -20606,7 +21109,8 @@ public final class MasterProcedureProtos { regionsOnCrashedServer_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000004; } - regionsOnCrashedServer_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry)); + regionsOnCrashedServer_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry)); break; } case 34: { @@ -20614,7 +21118,8 @@ public final class MasterProcedureProtos { regionsAssigned_ = new 
java.util.ArrayList(); mutable_bitField0_ |= 0x00000008; } - regionsAssigned_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry)); + regionsAssigned_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry)); break; } case 40: { @@ -20633,7 +21138,7 @@ public final class MasterProcedureProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { regionsOnCrashedServer_ = java.util.Collections.unmodifiableList(regionsOnCrashedServer_); @@ -20650,30 +21155,14 @@ public final class MasterProcedureProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_ServerCrashStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_ServerCrashStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ServerCrashStateData.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ServerCrashStateData.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ServerCrashStateData parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ServerCrashStateData(input, extensionRegistry); - } - }; - - @java.lang.Override - public 
com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.ServerName server_name = 1; public static final int SERVER_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName serverName_; /** @@ -20686,16 +21175,15 @@ public final class MasterProcedureProtos { * required .hbase.pb.ServerName server_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServerName() { - return serverName_; + return serverName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : serverName_; } /** * required .hbase.pb.ServerName server_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() { - return serverName_; + return serverName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : serverName_; } - // optional bool distributed_log_replay = 2; public static final int DISTRIBUTED_LOG_REPLAY_FIELD_NUMBER = 2; private boolean distributedLogReplay_; /** @@ -20711,7 +21199,6 @@ public final class MasterProcedureProtos { return distributedLogReplay_; } - // repeated .hbase.pb.RegionInfo regions_on_crashed_server = 3; public static final int REGIONS_ON_CRASHED_SERVER_FIELD_NUMBER = 3; private java.util.List regionsOnCrashedServer_; /** @@ -20747,7 +21234,6 @@ public final class MasterProcedureProtos { return regionsOnCrashedServer_.get(index); } - // repeated .hbase.pb.RegionInfo regions_assigned = 4; public static final int REGIONS_ASSIGNED_FIELD_NUMBER = 4; private java.util.List regionsAssigned_; /** @@ -20783,7 +21269,6 @@ public final class MasterProcedureProtos { return regionsAssigned_.get(index); } - // optional bool carrying_meta = 5; public static final int CARRYING_META_FIELD_NUMBER = 5; private boolean carryingMeta_; /** @@ -20799,7 +21284,6 @@ public 
final class MasterProcedureProtos { return carryingMeta_; } - // optional bool should_split_wal = 6 [default = true]; public static final int SHOULD_SPLIT_WAL_FIELD_NUMBER = 6; private boolean shouldSplitWal_; /** @@ -20815,18 +21299,11 @@ public final class MasterProcedureProtos { return shouldSplitWal_; } - private void initFields() { - serverName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - distributedLogReplay_ = false; - regionsOnCrashedServer_ = java.util.Collections.emptyList(); - regionsAssigned_ = java.util.Collections.emptyList(); - carryingMeta_ = false; - shouldSplitWal_ = true; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasServerName()) { memoizedIsInitialized = 0; @@ -20854,9 +21331,8 @@ public final class MasterProcedureProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, serverName_); + output.writeMessage(1, getServerName()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBool(2, distributedLogReplay_); @@ -20873,18 +21349,17 @@ public final class MasterProcedureProtos { if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeBool(6, shouldSplitWal_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, serverName_); + .computeMessageSize(1, getServerName()); } if (((bitField0_ & 0x00000002) == 
0x00000002)) { size += com.google.protobuf.CodedOutputStream @@ -20906,19 +21381,13 @@ public final class MasterProcedureProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(6, shouldSplitWal_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -20953,12 +21422,10 @@ public final class MasterProcedureProtos { result = result && (getShouldSplitWal() == other.getShouldSplitWal()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -20972,7 +21439,8 @@ public final class MasterProcedureProtos { } if (hasDistributedLogReplay()) { hash = (37 * hash) + DISTRIBUTED_LOG_REPLAY_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getDistributedLogReplay()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getDistributedLogReplay()); } if (getRegionsOnCrashedServerCount() > 0) { hash = (37 * hash) + REGIONS_ON_CRASHED_SERVER_FIELD_NUMBER; @@ -20984,13 +21452,15 @@ public final class MasterProcedureProtos { } if (hasCarryingMeta()) { hash = (37 * hash) + CARRYING_META_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getCarryingMeta()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getCarryingMeta()); } if (hasShouldSplitWal()) { hash = (37 * hash) + SHOULD_SPLIT_WAL_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getShouldSplitWal()); + hash = (53 * hash) + 
com.google.protobuf.Internal.hashBoolean( + getShouldSplitWal()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -21018,46 +21488,57 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ServerCrashStateData parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ServerCrashStateData parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ServerCrashStateData parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ServerCrashStateData parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ServerCrashStateData parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ServerCrashStateData parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ServerCrashStateData prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -21065,14 +21546,15 @@ public final class MasterProcedureProtos { * Protobuf type {@code hbase.pb.ServerCrashStateData} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ServerCrashStateDataOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ServerCrashStateData) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ServerCrashStateDataOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_ServerCrashStateData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_ServerCrashStateData_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -21085,25 +21567,22 @@ public final class MasterProcedureProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getServerNameFieldBuilder(); getRegionsOnCrashedServerFieldBuilder(); getRegionsAssignedFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (serverNameBuilder_ == null) { - serverName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + serverName_ = null; } else { serverNameBuilder_.clear(); } @@ -21129,10 +21608,6 @@ public final class MasterProcedureProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.internal_static_hbase_pb_ServerCrashStateData_descriptor; @@ -21197,6 +21672,32 @@ public final class MasterProcedureProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return 
(Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ServerCrashStateData) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ServerCrashStateData)other); @@ -21233,7 +21734,7 @@ public final class MasterProcedureProtos { regionsOnCrashedServer_ = other.regionsOnCrashedServer_; bitField0_ = (bitField0_ & ~0x00000004); regionsOnCrashedServerBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getRegionsOnCrashedServerFieldBuilder() : null; } else { regionsOnCrashedServerBuilder_.addAllMessages(other.regionsOnCrashedServer_); @@ -21259,7 +21760,7 @@ public final class MasterProcedureProtos { regionsAssigned_ = other.regionsAssigned_; bitField0_ = (bitField0_ & ~0x00000008); regionsAssignedBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getRegionsAssignedFieldBuilder() : null; } else { regionsAssignedBuilder_.addAllMessages(other.regionsAssigned_); @@ -21272,28 +21773,25 @@ public final class MasterProcedureProtos { if (other.hasShouldSplitWal()) { setShouldSplitWal(other.getShouldSplitWal()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasServerName()) { - return false; } if (!getServerName().isInitialized()) { - return false; } for (int i = 0; i < getRegionsOnCrashedServerCount(); i++) { if (!getRegionsOnCrashedServer(i).isInitialized()) { - return false; } } for (int i = 0; i < getRegionsAssignedCount(); i++) { if (!getRegionsAssigned(i).isInitialized()) { - return false; } } @@ -21309,7 +21807,7 @@ public final class MasterProcedureProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ServerCrashStateData) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -21319,9 +21817,8 @@ public final class MasterProcedureProtos { } private int bitField0_; - // required .hbase.pb.ServerName server_name = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName serverName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName serverName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverNameBuilder_; /** * required .hbase.pb.ServerName server_name = 1; @@ -21334,7 +21831,7 @@ public final class MasterProcedureProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServerName() { if (serverNameBuilder_ == null) { - return serverName_; + return serverName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : serverName_; } else { return serverNameBuilder_.getMessage(); } @@ -21375,6 +21872,7 @@ public final class MasterProcedureProtos { public Builder mergeServerName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (serverNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + serverName_ != null && serverName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { serverName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.newBuilder(serverName_).mergeFrom(value).buildPartial(); @@ -21393,7 +21891,7 @@ public final class MasterProcedureProtos { */ public Builder clearServerName() { if (serverNameBuilder_ == null) { - serverName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + serverName_ = null; onChanged(); } else { serverNameBuilder_.clear(); @@ -21416,19 +21914,20 @@ public final class MasterProcedureProtos { if (serverNameBuilder_ != null) { return serverNameBuilder_.getMessageOrBuilder(); } else { - return serverName_; + return serverName_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : serverName_; } } /** * required .hbase.pb.ServerName server_name = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getServerNameFieldBuilder() { if (serverNameBuilder_ == null) { - serverNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + serverNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( - serverName_, + getServerName(), getParentForChildren(), isClean()); serverName_ = null; @@ -21436,7 +21935,6 @@ public final class MasterProcedureProtos { return serverNameBuilder_; } - // optional bool distributed_log_replay = 2; private boolean distributedLogReplay_ ; /** * optional bool distributed_log_replay = 2; @@ -21469,7 +21967,6 @@ public final class MasterProcedureProtos { return this; } - // repeated .hbase.pb.RegionInfo regions_on_crashed_server = 3; private java.util.List regionsOnCrashedServer_ = java.util.Collections.emptyList(); private void ensureRegionsOnCrashedServerIsMutable() { @@ -21479,7 +21976,7 @@ public final class MasterProcedureProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> 
regionsOnCrashedServerBuilder_; /** @@ -21611,7 +22108,8 @@ public final class MasterProcedureProtos { java.lang.Iterable values) { if (regionsOnCrashedServerBuilder_ == null) { ensureRegionsOnCrashedServerIsMutable(); - super.addAll(values, regionsOnCrashedServer_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, regionsOnCrashedServer_); onChanged(); } else { regionsOnCrashedServerBuilder_.addAllMessages(values); @@ -21694,11 +22192,11 @@ public final class MasterProcedureProtos { getRegionsOnCrashedServerBuilderList() { return getRegionsOnCrashedServerFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> getRegionsOnCrashedServerFieldBuilder() { if (regionsOnCrashedServerBuilder_ == null) { - regionsOnCrashedServerBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + regionsOnCrashedServerBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( regionsOnCrashedServer_, ((bitField0_ & 0x00000004) == 0x00000004), @@ -21709,7 +22207,6 @@ public final class MasterProcedureProtos { return regionsOnCrashedServerBuilder_; } - // repeated .hbase.pb.RegionInfo regions_assigned = 4; private java.util.List regionsAssigned_ = java.util.Collections.emptyList(); private void ensureRegionsAssignedIsMutable() { @@ -21719,7 +22216,7 @@ public final class MasterProcedureProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionsAssignedBuilder_; /** @@ -21851,7 +22348,8 @@ public final class MasterProcedureProtos { java.lang.Iterable values) { if (regionsAssignedBuilder_ == null) { ensureRegionsAssignedIsMutable(); - super.addAll(values, regionsAssigned_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, regionsAssigned_); onChanged(); } else { regionsAssignedBuilder_.addAllMessages(values); @@ -21934,11 +22432,11 @@ public final class MasterProcedureProtos { getRegionsAssignedBuilderList() { return getRegionsAssignedFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> getRegionsAssignedFieldBuilder() { if (regionsAssignedBuilder_ == null) { - regionsAssignedBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + regionsAssignedBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( regionsAssigned_, ((bitField0_ & 0x00000008) == 0x00000008), @@ -21949,7 +22447,6 @@ public final class MasterProcedureProtos { return regionsAssignedBuilder_; } - // optional bool carrying_meta = 5; private boolean carryingMeta_ ; /** * optional bool carrying_meta = 5; @@ -21982,7 +22479,6 @@ public final class MasterProcedureProtos { return this; } - // optional bool should_split_wal = 
6 [default = true]; private boolean shouldSplitWal_ = true; /** * optional bool should_split_wal = 6 [default = true]; @@ -22014,109 +22510,146 @@ public final class MasterProcedureProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ServerCrashStateData) } + // @@protoc_insertion_point(class_scope:hbase.pb.ServerCrashStateData) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ServerCrashStateData DEFAULT_INSTANCE; static { - defaultInstance = new ServerCrashStateData(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ServerCrashStateData(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ServerCrashStateData getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ServerCrashStateData parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ServerCrashStateData(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ServerCrashStateData getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // 
@@protoc_insertion_point(class_scope:hbase.pb.ServerCrashStateData) } - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_CreateTableStateData_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_CreateTableStateData_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ModifyTableStateData_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ModifyTableStateData_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_TruncateTableStateData_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_TruncateTableStateData_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_DeleteTableStateData_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_DeleteTableStateData_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_CreateNamespaceStateData_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_CreateNamespaceStateData_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ModifyNamespaceStateData_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ModifyNamespaceStateData_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_DeleteNamespaceStateData_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_DeleteNamespaceStateData_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_AddColumnFamilyStateData_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_AddColumnFamilyStateData_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ModifyColumnFamilyStateData_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ModifyColumnFamilyStateData_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_DeleteColumnFamilyStateData_descriptor; - private 
static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_DeleteColumnFamilyStateData_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_EnableTableStateData_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_EnableTableStateData_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_DisableTableStateData_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_DisableTableStateData_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RestoreParentToChildRegionsPair_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RestoreParentToChildRegionsPair_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_CloneSnapshotStateData_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_CloneSnapshotStateData_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final 
com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RestoreSnapshotStateData_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RestoreSnapshotStateData_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_DispatchMergingRegionsStateData_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_DispatchMergingRegionsStateData_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ServerCrashStateData_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ServerCrashStateData_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { @@ -22303,121 +22836,123 @@ public final class MasterProcedureProtos { "ureProtosH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_hbase_pb_CreateTableStateData_descriptor = - getDescriptor().getMessageTypes().get(0); - 
internal_static_hbase_pb_CreateTableStateData_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_CreateTableStateData_descriptor, - new java.lang.String[] { "UserInfo", "TableSchema", "RegionInfo", }); - internal_static_hbase_pb_ModifyTableStateData_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_hbase_pb_ModifyTableStateData_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ModifyTableStateData_descriptor, - new java.lang.String[] { "UserInfo", "UnmodifiedTableSchema", "ModifiedTableSchema", "DeleteColumnFamilyInModify", }); - internal_static_hbase_pb_TruncateTableStateData_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_hbase_pb_TruncateTableStateData_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_TruncateTableStateData_descriptor, - new java.lang.String[] { "UserInfo", "PreserveSplits", "TableName", "TableSchema", "RegionInfo", }); - internal_static_hbase_pb_DeleteTableStateData_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_hbase_pb_DeleteTableStateData_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_DeleteTableStateData_descriptor, - new java.lang.String[] { "UserInfo", "TableName", "RegionInfo", }); - internal_static_hbase_pb_CreateNamespaceStateData_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_hbase_pb_CreateNamespaceStateData_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_CreateNamespaceStateData_descriptor, - new java.lang.String[] { "NamespaceDescriptor", }); - internal_static_hbase_pb_ModifyNamespaceStateData_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_hbase_pb_ModifyNamespaceStateData_fieldAccessorTable = 
new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ModifyNamespaceStateData_descriptor, - new java.lang.String[] { "NamespaceDescriptor", "UnmodifiedNamespaceDescriptor", }); - internal_static_hbase_pb_DeleteNamespaceStateData_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_hbase_pb_DeleteNamespaceStateData_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_DeleteNamespaceStateData_descriptor, - new java.lang.String[] { "NamespaceName", "NamespaceDescriptor", }); - internal_static_hbase_pb_AddColumnFamilyStateData_descriptor = - getDescriptor().getMessageTypes().get(7); - internal_static_hbase_pb_AddColumnFamilyStateData_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_AddColumnFamilyStateData_descriptor, - new java.lang.String[] { "UserInfo", "TableName", "ColumnfamilySchema", "UnmodifiedTableSchema", }); - internal_static_hbase_pb_ModifyColumnFamilyStateData_descriptor = - getDescriptor().getMessageTypes().get(8); - internal_static_hbase_pb_ModifyColumnFamilyStateData_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ModifyColumnFamilyStateData_descriptor, - new java.lang.String[] { "UserInfo", "TableName", "ColumnfamilySchema", "UnmodifiedTableSchema", }); - internal_static_hbase_pb_DeleteColumnFamilyStateData_descriptor = - getDescriptor().getMessageTypes().get(9); - internal_static_hbase_pb_DeleteColumnFamilyStateData_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_DeleteColumnFamilyStateData_descriptor, - new java.lang.String[] { "UserInfo", "TableName", "ColumnfamilyName", "UnmodifiedTableSchema", }); - internal_static_hbase_pb_EnableTableStateData_descriptor = - getDescriptor().getMessageTypes().get(10); - 
internal_static_hbase_pb_EnableTableStateData_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_EnableTableStateData_descriptor, - new java.lang.String[] { "UserInfo", "TableName", "SkipTableStateCheck", }); - internal_static_hbase_pb_DisableTableStateData_descriptor = - getDescriptor().getMessageTypes().get(11); - internal_static_hbase_pb_DisableTableStateData_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_DisableTableStateData_descriptor, - new java.lang.String[] { "UserInfo", "TableName", "SkipTableStateCheck", }); - internal_static_hbase_pb_RestoreParentToChildRegionsPair_descriptor = - getDescriptor().getMessageTypes().get(12); - internal_static_hbase_pb_RestoreParentToChildRegionsPair_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RestoreParentToChildRegionsPair_descriptor, - new java.lang.String[] { "ParentRegionName", "Child1RegionName", "Child2RegionName", }); - internal_static_hbase_pb_CloneSnapshotStateData_descriptor = - getDescriptor().getMessageTypes().get(13); - internal_static_hbase_pb_CloneSnapshotStateData_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_CloneSnapshotStateData_descriptor, - new java.lang.String[] { "UserInfo", "Snapshot", "TableSchema", "RegionInfo", "ParentToChildRegionsPairList", }); - internal_static_hbase_pb_RestoreSnapshotStateData_descriptor = - getDescriptor().getMessageTypes().get(14); - internal_static_hbase_pb_RestoreSnapshotStateData_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RestoreSnapshotStateData_descriptor, - new java.lang.String[] { "UserInfo", "Snapshot", "ModifiedTableSchema", "RegionInfoForRestore", "RegionInfoForRemove", "RegionInfoForAdd", "ParentToChildRegionsPairList", }); - 
internal_static_hbase_pb_DispatchMergingRegionsStateData_descriptor = - getDescriptor().getMessageTypes().get(15); - internal_static_hbase_pb_DispatchMergingRegionsStateData_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_DispatchMergingRegionsStateData_descriptor, - new java.lang.String[] { "UserInfo", "TableName", "RegionInfo", "Forcible", }); - internal_static_hbase_pb_ServerCrashStateData_descriptor = - getDescriptor().getMessageTypes().get(16); - internal_static_hbase_pb_ServerCrashStateData_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ServerCrashStateData_descriptor, - new java.lang.String[] { "ServerName", "DistributedLogReplay", "RegionsOnCrashedServer", "RegionsAssigned", "CarryingMeta", "ShouldSplitWal", }); - return null; - } - }; + new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(), org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.getDescriptor(), }, assigner); + internal_static_hbase_pb_CreateTableStateData_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_hbase_pb_CreateTableStateData_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_CreateTableStateData_descriptor, + new java.lang.String[] { "UserInfo", "TableSchema", "RegionInfo", }); + internal_static_hbase_pb_ModifyTableStateData_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_hbase_pb_ModifyTableStateData_fieldAccessorTable = 
new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ModifyTableStateData_descriptor, + new java.lang.String[] { "UserInfo", "UnmodifiedTableSchema", "ModifiedTableSchema", "DeleteColumnFamilyInModify", }); + internal_static_hbase_pb_TruncateTableStateData_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_hbase_pb_TruncateTableStateData_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_TruncateTableStateData_descriptor, + new java.lang.String[] { "UserInfo", "PreserveSplits", "TableName", "TableSchema", "RegionInfo", }); + internal_static_hbase_pb_DeleteTableStateData_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_hbase_pb_DeleteTableStateData_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_DeleteTableStateData_descriptor, + new java.lang.String[] { "UserInfo", "TableName", "RegionInfo", }); + internal_static_hbase_pb_CreateNamespaceStateData_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_hbase_pb_CreateNamespaceStateData_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_CreateNamespaceStateData_descriptor, + new java.lang.String[] { "NamespaceDescriptor", }); + internal_static_hbase_pb_ModifyNamespaceStateData_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_hbase_pb_ModifyNamespaceStateData_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ModifyNamespaceStateData_descriptor, + new java.lang.String[] { "NamespaceDescriptor", "UnmodifiedNamespaceDescriptor", }); + internal_static_hbase_pb_DeleteNamespaceStateData_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_hbase_pb_DeleteNamespaceStateData_fieldAccessorTable = new + 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_DeleteNamespaceStateData_descriptor, + new java.lang.String[] { "NamespaceName", "NamespaceDescriptor", }); + internal_static_hbase_pb_AddColumnFamilyStateData_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_hbase_pb_AddColumnFamilyStateData_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_AddColumnFamilyStateData_descriptor, + new java.lang.String[] { "UserInfo", "TableName", "ColumnfamilySchema", "UnmodifiedTableSchema", }); + internal_static_hbase_pb_ModifyColumnFamilyStateData_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_hbase_pb_ModifyColumnFamilyStateData_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ModifyColumnFamilyStateData_descriptor, + new java.lang.String[] { "UserInfo", "TableName", "ColumnfamilySchema", "UnmodifiedTableSchema", }); + internal_static_hbase_pb_DeleteColumnFamilyStateData_descriptor = + getDescriptor().getMessageTypes().get(9); + internal_static_hbase_pb_DeleteColumnFamilyStateData_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_DeleteColumnFamilyStateData_descriptor, + new java.lang.String[] { "UserInfo", "TableName", "ColumnfamilyName", "UnmodifiedTableSchema", }); + internal_static_hbase_pb_EnableTableStateData_descriptor = + getDescriptor().getMessageTypes().get(10); + internal_static_hbase_pb_EnableTableStateData_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_EnableTableStateData_descriptor, + new java.lang.String[] { "UserInfo", "TableName", "SkipTableStateCheck", }); + internal_static_hbase_pb_DisableTableStateData_descriptor = + getDescriptor().getMessageTypes().get(11); + 
internal_static_hbase_pb_DisableTableStateData_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_DisableTableStateData_descriptor, + new java.lang.String[] { "UserInfo", "TableName", "SkipTableStateCheck", }); + internal_static_hbase_pb_RestoreParentToChildRegionsPair_descriptor = + getDescriptor().getMessageTypes().get(12); + internal_static_hbase_pb_RestoreParentToChildRegionsPair_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_RestoreParentToChildRegionsPair_descriptor, + new java.lang.String[] { "ParentRegionName", "Child1RegionName", "Child2RegionName", }); + internal_static_hbase_pb_CloneSnapshotStateData_descriptor = + getDescriptor().getMessageTypes().get(13); + internal_static_hbase_pb_CloneSnapshotStateData_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_CloneSnapshotStateData_descriptor, + new java.lang.String[] { "UserInfo", "Snapshot", "TableSchema", "RegionInfo", "ParentToChildRegionsPairList", }); + internal_static_hbase_pb_RestoreSnapshotStateData_descriptor = + getDescriptor().getMessageTypes().get(14); + internal_static_hbase_pb_RestoreSnapshotStateData_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_RestoreSnapshotStateData_descriptor, + new java.lang.String[] { "UserInfo", "Snapshot", "ModifiedTableSchema", "RegionInfoForRestore", "RegionInfoForRemove", "RegionInfoForAdd", "ParentToChildRegionsPairList", }); + internal_static_hbase_pb_DispatchMergingRegionsStateData_descriptor = + getDescriptor().getMessageTypes().get(15); + internal_static_hbase_pb_DispatchMergingRegionsStateData_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_DispatchMergingRegionsStateData_descriptor, + new java.lang.String[] { "UserInfo", "TableName", 
"RegionInfo", "Forcible", }); + internal_static_hbase_pb_ServerCrashStateData_descriptor = + getDescriptor().getMessageTypes().get(16); + internal_static_hbase_pb_ServerCrashStateData_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ServerCrashStateData_descriptor, + new java.lang.String[] { "ServerName", "DistributedLogReplay", "RegionsOnCrashedServer", "RegionsAssigned", "CarryingMeta", "ShouldSplitWal", }); + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(); + org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java index eeabfb0..4426013 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java @@ -6,7 +6,13 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated; public final class MasterProtos { private MasterProtos() {} public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); } /** * Protobuf enum {@code hbase.pb.MasterSwitchType} @@ -16,11 +22,11 @@ public final class MasterProtos { /** * SPLIT = 0; */ - SPLIT(0, 0), + SPLIT(0), /** * MERGE = 1; */ - MERGE(1, 1), + MERGE(1), ; /** @@ -33,9 +39,19 @@ public final class MasterProtos { public static final int MERGE_VALUE = 1; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * 
@deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static MasterSwitchType valueOf(int value) { + return forNumber(value); + } + + public static MasterSwitchType forNumber(int value) { switch (value) { case 0: return SPLIT; case 1: return MERGE; @@ -47,17 +63,17 @@ public final class MasterProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + MasterSwitchType> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public MasterSwitchType findValueByNumber(int number) { - return MasterSwitchType.valueOf(number); + return MasterSwitchType.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -79,21 +95,19 @@ public final class MasterProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private MasterSwitchType(int index, int value) { - this.index = index; + private MasterSwitchType(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.MasterSwitchType) } - public interface AddColumnRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface AddColumnRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.AddColumnRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.TableName table_name = 1; /** * required .hbase.pb.TableName table_name = 1; */ @@ -107,7 +121,6 @@ public final class MasterProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); - // required .hbase.pb.ColumnFamilySchema column_families = 2; /** * required 
.hbase.pb.ColumnFamilySchema column_families = 2; */ @@ -121,7 +134,6 @@ public final class MasterProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder(); - // optional uint64 nonce_group = 3 [default = 0]; /** * optional uint64 nonce_group = 3 [default = 0]; */ @@ -131,7 +143,6 @@ public final class MasterProtos { */ long getNonceGroup(); - // optional uint64 nonce = 4 [default = 0]; /** * optional uint64 nonce = 4 [default = 0]; */ @@ -144,36 +155,29 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.AddColumnRequest} */ - public static final class AddColumnRequest extends - com.google.protobuf.GeneratedMessage - implements AddColumnRequestOrBuilder { + public static final class AddColumnRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.AddColumnRequest) + AddColumnRequestOrBuilder { // Use AddColumnRequest.newBuilder() to construct. 
- private AddColumnRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private AddColumnRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private AddColumnRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final AddColumnRequest defaultInstance; - public static AddColumnRequest getDefaultInstance() { - return defaultInstance; - } - - public AddColumnRequest getDefaultInstanceForType() { - return defaultInstance; + private AddColumnRequest() { + nonceGroup_ = 0L; + nonce_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private AddColumnRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -234,7 +238,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -245,30 +249,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AddColumnRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AddColumnRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public AddColumnRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new AddColumnRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.TableName table_name = 1; public static final int TABLE_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** @@ -281,16 +269,15 @@ public final class MasterProtos { * required .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * required .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { - return tableName_; + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } - // required .hbase.pb.ColumnFamilySchema column_families = 2; public static final int COLUMN_FAMILIES_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_; /** @@ -303,16 +290,15 @@ public final class MasterProtos { * required .hbase.pb.ColumnFamilySchema column_families = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() { - return columnFamilies_; + return columnFamilies_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance() : columnFamilies_; } /** * required .hbase.pb.ColumnFamilySchema column_families = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() { - return columnFamilies_; + return columnFamilies_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance() : columnFamilies_; } - // optional uint64 nonce_group = 3 [default = 0]; public static final int NONCE_GROUP_FIELD_NUMBER = 3; private long nonceGroup_; /** @@ -328,7 +314,6 @@ public final class MasterProtos { return nonceGroup_; } - // optional uint64 nonce = 4 [default = 0]; public static final int NONCE_FIELD_NUMBER = 4; private long nonce_; /** @@ -344,16 +329,11 @@ public final class MasterProtos { return nonce_; } - private void initFields() { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - columnFamilies_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); - nonceGroup_ = 0L; - nonce_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasTableName()) { memoizedIsInitialized = 0; @@ -377,12 +357,11 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, tableName_); + output.writeMessage(1, getTableName()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, columnFamilies_); + output.writeMessage(2, getColumnFamilies()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeUInt64(3, nonceGroup_); @@ -390,22 +369,21 @@ public final class MasterProtos { if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeUInt64(4, nonce_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int 
size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, tableName_); + .computeMessageSize(1, getTableName()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, columnFamilies_); + .computeMessageSize(2, getColumnFamilies()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream @@ -415,19 +393,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(4, nonce_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -458,12 +430,10 @@ public final class MasterProtos { result = result && (getNonce() == other.getNonce()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -481,13 +451,15 @@ public final class MasterProtos { } if (hasNonceGroup()) { hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonceGroup()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonceGroup()); } if (hasNonce()) { hash = (37 * hash) + NONCE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonce()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonce()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash 
= (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -515,46 +487,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -562,14 +545,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.AddColumnRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.AddColumnRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AddColumnRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AddColumnRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -582,30 +566,27 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTableNameFieldBuilder(); getColumnFamiliesFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (columnFamiliesBuilder_ == null) { - columnFamilies_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); + columnFamilies_ = null; } else { columnFamiliesBuilder_.clear(); } @@ -617,10 +598,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AddColumnRequest_descriptor; @@ -671,6 +648,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + 
com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest)other); @@ -694,25 +697,22 @@ public final class MasterProtos { if (other.hasNonce()) { setNonce(other.getNonce()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasTableName()) { - return false; } if (!hasColumnFamilies()) { - return false; } if (!getTableName().isInitialized()) { - return false; } if (!getColumnFamilies().isInitialized()) { - return false; } return true; @@ -727,7 +727,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -737,9 +737,8 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.TableName table_name = 1; - private 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * required .hbase.pb.TableName table_name = 1; @@ -752,7 +751,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } @@ -793,6 +792,7 @@ public final class MasterProtos { public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); @@ -811,7 +811,7 @@ public final class MasterProtos { */ public Builder clearTableName() { if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); @@ -834,19 +834,20 @@ public final class MasterProtos { if (tableNameBuilder_ != null) { 
return tableNameBuilder_.getMessageOrBuilder(); } else { - return tableName_; + return tableName_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * required .hbase.pb.TableName table_name = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { - tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( - tableName_, + getTableName(), getParentForChildren(), isClean()); tableName_ = null; @@ -854,9 +855,8 @@ public final class MasterProtos { return tableNameBuilder_; } - // required .hbase.pb.ColumnFamilySchema column_families = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> columnFamiliesBuilder_; /** 
* required .hbase.pb.ColumnFamilySchema column_families = 2; @@ -869,7 +869,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() { if (columnFamiliesBuilder_ == null) { - return columnFamilies_; + return columnFamilies_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance() : columnFamilies_; } else { return columnFamiliesBuilder_.getMessage(); } @@ -910,6 +910,7 @@ public final class MasterProtos { public Builder mergeColumnFamilies(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { if (columnFamiliesBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + columnFamilies_ != null && columnFamilies_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()) { columnFamilies_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(columnFamilies_).mergeFrom(value).buildPartial(); @@ -928,7 +929,7 @@ public final class MasterProtos { */ public Builder clearColumnFamilies() { if (columnFamiliesBuilder_ == null) { - columnFamilies_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); + columnFamilies_ = null; onChanged(); } else { columnFamiliesBuilder_.clear(); @@ -951,19 +952,20 @@ public final class MasterProtos { if (columnFamiliesBuilder_ != null) { return columnFamiliesBuilder_.getMessageOrBuilder(); } else { - return columnFamilies_; + return columnFamilies_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance() : columnFamilies_; } } /** * required .hbase.pb.ColumnFamilySchema column_families = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> getColumnFamiliesFieldBuilder() { if (columnFamiliesBuilder_ == null) { - columnFamiliesBuilder_ = new com.google.protobuf.SingleFieldBuilder< + columnFamiliesBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>( - columnFamilies_, + getColumnFamilies(), getParentForChildren(), isClean()); columnFamilies_ = null; @@ -971,7 +973,6 @@ public final class MasterProtos { return columnFamiliesBuilder_; } - // optional uint64 nonce_group = 3 [default = 0]; private long nonceGroup_ ; /** * optional uint64 nonce_group = 3 [default = 0]; @@ -1004,7 +1005,6 @@ public final class MasterProtos { return this; } - // optional uint64 nonce = 4 [default = 0]; private long nonce_ ; /** * optional uint64 nonce = 4 [default = 0]; @@ -1036,22 +1036,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // 
@@protoc_insertion_point(builder_scope:hbase.pb.AddColumnRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.AddColumnRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest DEFAULT_INSTANCE; static { - defaultInstance = new AddColumnRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public AddColumnRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new AddColumnRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.AddColumnRequest) } - public interface AddColumnResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface AddColumnResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.AddColumnResponse) + com.google.protobuf.MessageOrBuilder { - // optional uint64 proc_id = 1; /** * optional uint64 proc_id = 1; */ @@ -1064,36 +1101,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.AddColumnResponse} */ - public static final class AddColumnResponse extends - com.google.protobuf.GeneratedMessage - implements 
AddColumnResponseOrBuilder { + public static final class AddColumnResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.AddColumnResponse) + AddColumnResponseOrBuilder { // Use AddColumnResponse.newBuilder() to construct. - private AddColumnResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private AddColumnResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private AddColumnResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final AddColumnResponse defaultInstance; - public static AddColumnResponse getDefaultInstance() { - return defaultInstance; } - - public AddColumnResponse getDefaultInstanceForType() { - return defaultInstance; + private AddColumnResponse() { + procId_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private AddColumnResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -1123,7 +1152,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -1134,30 +1163,14 @@ public final class MasterProtos { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AddColumnResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AddColumnResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public AddColumnResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new AddColumnResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional uint64 proc_id = 1; public static final int PROC_ID_FIELD_NUMBER = 1; private long procId_; /** @@ -1173,13 +1186,11 @@ public final class MasterProtos { return procId_; } - private void initFields() { - procId_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -1187,16 +1198,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, procId_); } - getUnknownFields().writeTo(output); + 
unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -1204,19 +1213,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(1, procId_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -1232,12 +1235,10 @@ public final class MasterProtos { result = result && (getProcId() == other.getProcId()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -1247,9 +1248,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasProcId()) { hash = (37 * hash) + PROC_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getProcId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getProcId()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -1277,46 +1279,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return 
DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -1324,14 +1337,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.AddColumnResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.AddColumnResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AddColumnResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AddColumnResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -1344,18 +1358,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); 
maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); procId_ = 0L; @@ -1363,10 +1374,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AddColumnResponse_descriptor; @@ -1397,6 +1404,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse)other); @@ -1411,7 +1444,8 @@ public final class MasterProtos { if (other.hasProcId()) { 
setProcId(other.getProcId()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -1428,7 +1462,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -1438,7 +1472,6 @@ public final class MasterProtos { } private int bitField0_; - // optional uint64 proc_id = 1; private long procId_ ; /** * optional uint64 proc_id = 1; @@ -1470,22 +1503,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.AddColumnResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.AddColumnResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse DEFAULT_INSTANCE; static { - defaultInstance = new AddColumnResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public AddColumnResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, 
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new AddColumnResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.AddColumnResponse) } - public interface DeleteColumnRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface DeleteColumnRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.DeleteColumnRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.TableName table_name = 1; /** * required .hbase.pb.TableName table_name = 1; */ @@ -1499,7 +1569,6 @@ public final class MasterProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); - // required bytes column_name = 2; /** * required bytes column_name = 2; */ @@ -1509,7 +1578,6 @@ public final class MasterProtos { */ com.google.protobuf.ByteString getColumnName(); - // optional uint64 nonce_group = 3 [default = 0]; /** * optional uint64 nonce_group = 3 [default = 0]; */ @@ -1519,7 +1587,6 @@ public final class MasterProtos { */ long getNonceGroup(); - // optional uint64 nonce = 4 [default = 0]; /** * optional uint64 nonce = 4 [default = 0]; */ @@ -1532,36 +1599,30 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.DeleteColumnRequest} */ - public static final class DeleteColumnRequest extends - com.google.protobuf.GeneratedMessage - implements DeleteColumnRequestOrBuilder { + public static final class DeleteColumnRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // 
@@protoc_insertion_point(message_implements:hbase.pb.DeleteColumnRequest) + DeleteColumnRequestOrBuilder { // Use DeleteColumnRequest.newBuilder() to construct. - private DeleteColumnRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private DeleteColumnRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private DeleteColumnRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final DeleteColumnRequest defaultInstance; - public static DeleteColumnRequest getDefaultInstance() { - return defaultInstance; - } - - public DeleteColumnRequest getDefaultInstanceForType() { - return defaultInstance; + private DeleteColumnRequest() { + columnName_ = com.google.protobuf.ByteString.EMPTY; + nonceGroup_ = 0L; + nonce_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private DeleteColumnRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -1614,7 +1675,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -1625,30 +1686,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteColumnRequest_descriptor; 
} - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteColumnRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public DeleteColumnRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new DeleteColumnRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.TableName table_name = 1; public static final int TABLE_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** @@ -1661,16 +1706,15 @@ public final class MasterProtos { * required .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * required .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { - return tableName_; + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } - // required bytes column_name = 2; public static final int COLUMN_NAME_FIELD_NUMBER = 2; private com.google.protobuf.ByteString columnName_; /** @@ -1686,7 +1730,6 @@ public final class MasterProtos { return columnName_; } - // optional uint64 nonce_group = 3 [default = 0]; public static final int NONCE_GROUP_FIELD_NUMBER = 3; private long nonceGroup_; /** @@ -1702,7 +1745,6 @@ public final class MasterProtos { return nonceGroup_; } - // optional uint64 nonce = 4 [default = 0]; public static final int NONCE_FIELD_NUMBER = 4; private long nonce_; /** @@ -1718,16 +1760,11 @@ public final class MasterProtos { return nonce_; } - private void initFields() { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - columnName_ = com.google.protobuf.ByteString.EMPTY; - nonceGroup_ = 0L; - nonce_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasTableName()) { memoizedIsInitialized = 0; @@ -1747,9 +1784,8 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, tableName_); + output.writeMessage(1, getTableName()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, columnName_); @@ -1760,18 +1796,17 @@ public final class MasterProtos { if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeUInt64(4, nonce_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + 
int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, tableName_); + .computeMessageSize(1, getTableName()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream @@ -1785,19 +1820,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(4, nonce_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -1828,12 +1857,10 @@ public final class MasterProtos { result = result && (getNonce() == other.getNonce()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -1851,13 +1878,15 @@ public final class MasterProtos { } if (hasNonceGroup()) { hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonceGroup()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonceGroup()); } if (hasNonce()) { hash = (37 * hash) + NONCE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonce()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonce()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -1885,46 +1914,57 @@ public final class MasterProtos { } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -1932,14 +1972,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.DeleteColumnRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.DeleteColumnRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteColumnRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteColumnRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -1952,23 +1993,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTableNameFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; } else { tableNameBuilder_.clear(); } @@ -1982,10 +2020,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteColumnRequest_descriptor; @@ -2032,6 +2066,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return 
(Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest)other); @@ -2055,21 +2115,19 @@ public final class MasterProtos { if (other.hasNonce()) { setNonce(other.getNonce()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasTableName()) { - return false; } if (!hasColumnName()) { - return false; } if (!getTableName().isInitialized()) { - return false; } return true; @@ -2084,7 +2142,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -2094,9 +2152,8 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.TableName table_name = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * required .hbase.pb.TableName table_name = 1; @@ -2109,7 +2166,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } @@ -2150,6 +2207,7 @@ public final class MasterProtos { public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); @@ -2168,7 +2226,7 @@ public final class MasterProtos { */ public Builder clearTableName() { if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); @@ -2191,19 +2249,20 @@ public final class MasterProtos { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { - return tableName_; + return tableName_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * required .hbase.pb.TableName table_name = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { - tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( - tableName_, + getTableName(), getParentForChildren(), isClean()); tableName_ = null; @@ -2211,7 +2270,6 @@ public final class MasterProtos { return tableNameBuilder_; } - // required bytes column_name = 2; private com.google.protobuf.ByteString columnName_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes column_name = 2; @@ -2247,7 +2305,6 @@ public final class MasterProtos { return this; } - // optional uint64 nonce_group = 3 [default = 0]; private long nonceGroup_ ; /** * optional uint64 nonce_group = 3 [default = 0]; @@ -2280,7 +2337,6 @@ public final class MasterProtos { return this; } - // optional uint64 nonce = 4 [default = 0]; private long nonce_ ; /** * optional uint64 nonce = 4 [default = 0]; @@ -2312,22 +2368,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet 
unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.DeleteColumnRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.DeleteColumnRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest DEFAULT_INSTANCE; static { - defaultInstance = new DeleteColumnRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public DeleteColumnRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DeleteColumnRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.DeleteColumnRequest) } - public interface DeleteColumnResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface DeleteColumnResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.DeleteColumnResponse) + com.google.protobuf.MessageOrBuilder { - // optional uint64 proc_id = 1; /** * optional uint64 proc_id = 1; */ @@ -2340,36 +2433,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.DeleteColumnResponse} */ - public 
static final class DeleteColumnResponse extends - com.google.protobuf.GeneratedMessage - implements DeleteColumnResponseOrBuilder { + public static final class DeleteColumnResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.DeleteColumnResponse) + DeleteColumnResponseOrBuilder { // Use DeleteColumnResponse.newBuilder() to construct. - private DeleteColumnResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private DeleteColumnResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private DeleteColumnResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final DeleteColumnResponse defaultInstance; - public static DeleteColumnResponse getDefaultInstance() { - return defaultInstance; } - - public DeleteColumnResponse getDefaultInstanceForType() { - return defaultInstance; + private DeleteColumnResponse() { + procId_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private DeleteColumnResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -2399,7 +2484,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); 
@@ -2410,30 +2495,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteColumnResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteColumnResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public DeleteColumnResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new DeleteColumnResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional uint64 proc_id = 1; public static final int PROC_ID_FIELD_NUMBER = 1; private long procId_; /** @@ -2449,13 +2518,11 @@ public final class MasterProtos { return procId_; } - private void initFields() { - procId_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -2463,16 +2530,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { 
output.writeUInt64(1, procId_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -2480,19 +2545,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(1, procId_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -2508,12 +2567,10 @@ public final class MasterProtos { result = result && (getProcId() == other.getProcId()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -2523,9 +2580,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasProcId()) { hash = (37 * hash) + PROC_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getProcId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getProcId()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -2553,46 +2611,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder 
newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -2600,14 +2669,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.DeleteColumnResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.DeleteColumnResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteColumnResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteColumnResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -2620,18 +2690,15 @@ public final class MasterProtos { } private Builder( - 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); procId_ = 0L; @@ -2639,10 +2706,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteColumnResponse_descriptor; @@ -2673,6 +2736,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse) { return 
mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse)other); @@ -2687,7 +2776,8 @@ public final class MasterProtos { if (other.hasProcId()) { setProcId(other.getProcId()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -2704,7 +2794,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -2714,7 +2804,6 @@ public final class MasterProtos { } private int bitField0_; - // optional uint64 proc_id = 1; private long procId_ ; /** * optional uint64 proc_id = 1; @@ -2746,22 +2835,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.DeleteColumnResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.DeleteColumnResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse DEFAULT_INSTANCE; static { - defaultInstance = new DeleteColumnResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + 
@java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public DeleteColumnResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DeleteColumnResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.DeleteColumnResponse) } - public interface ModifyColumnRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ModifyColumnRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ModifyColumnRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.TableName table_name = 1; /** * required .hbase.pb.TableName table_name = 1; */ @@ -2775,7 +2901,6 @@ public final class MasterProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); - // required .hbase.pb.ColumnFamilySchema column_families = 2; /** * required .hbase.pb.ColumnFamilySchema column_families = 2; */ @@ -2789,7 +2914,6 @@ public final class MasterProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder(); - // optional uint64 nonce_group = 3 [default = 0]; /** * optional uint64 nonce_group = 3 [default = 0]; */ @@ -2799,7 +2923,6 @@ public final class MasterProtos { */ long getNonceGroup(); - // optional uint64 nonce = 4 [default = 0]; /** * optional uint64 nonce = 4 [default = 0]; */ @@ -2812,36 +2935,29 @@ public final class 
MasterProtos { /** * Protobuf type {@code hbase.pb.ModifyColumnRequest} */ - public static final class ModifyColumnRequest extends - com.google.protobuf.GeneratedMessage - implements ModifyColumnRequestOrBuilder { + public static final class ModifyColumnRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ModifyColumnRequest) + ModifyColumnRequestOrBuilder { // Use ModifyColumnRequest.newBuilder() to construct. - private ModifyColumnRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private ModifyColumnRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private ModifyColumnRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ModifyColumnRequest defaultInstance; - public static ModifyColumnRequest getDefaultInstance() { - return defaultInstance; - } - - public ModifyColumnRequest getDefaultInstanceForType() { - return defaultInstance; + private ModifyColumnRequest() { + nonceGroup_ = 0L; + nonce_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ModifyColumnRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -2902,7 +3018,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + 
e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -2913,30 +3029,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyColumnRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyColumnRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ModifyColumnRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ModifyColumnRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.TableName table_name = 1; public static final int TABLE_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** @@ -2949,16 +3049,15 @@ public final class MasterProtos { * required .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { - return tableName_; + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * required .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } - // required .hbase.pb.ColumnFamilySchema column_families = 2; public static final int COLUMN_FAMILIES_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_; /** @@ -2971,16 +3070,15 @@ public final class MasterProtos { * required .hbase.pb.ColumnFamilySchema column_families = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() { - return columnFamilies_; + return columnFamilies_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance() : columnFamilies_; } /** * required .hbase.pb.ColumnFamilySchema column_families = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() { - return columnFamilies_; + return columnFamilies_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance() : columnFamilies_; } - // optional uint64 nonce_group = 3 [default = 0]; public static final int NONCE_GROUP_FIELD_NUMBER = 3; private long nonceGroup_; /** @@ -2996,7 +3094,6 @@ public final class MasterProtos { return nonceGroup_; } - // optional uint64 nonce = 4 [default = 0]; public static final int NONCE_FIELD_NUMBER = 4; private long nonce_; /** @@ -3012,16 +3109,11 @@ public final class MasterProtos { return nonce_; } - private void initFields() { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - columnFamilies_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); - nonceGroup_ = 0L; - nonce_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasTableName()) { memoizedIsInitialized = 0; @@ -3045,12 +3137,11 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, tableName_); + output.writeMessage(1, getTableName()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, columnFamilies_); + output.writeMessage(2, getColumnFamilies()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeUInt64(3, nonceGroup_); @@ -3058,22 +3149,21 @@ public final class MasterProtos { if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeUInt64(4, nonce_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + 
int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, tableName_); + .computeMessageSize(1, getTableName()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, columnFamilies_); + .computeMessageSize(2, getColumnFamilies()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream @@ -3083,19 +3173,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(4, nonce_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -3126,12 +3210,10 @@ public final class MasterProtos { result = result && (getNonce() == other.getNonce()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -3149,13 +3231,15 @@ public final class MasterProtos { } if (hasNonceGroup()) { hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonceGroup()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonceGroup()); } if (hasNonce()) { hash = (37 * hash) + NONCE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonce()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonce()); } - hash = (29 * hash) + 
getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -3183,46 +3267,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -3230,14 +3325,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.ModifyColumnRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ModifyColumnRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyColumnRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyColumnRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -3250,30 +3346,27 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTableNameFieldBuilder(); getColumnFamiliesFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (columnFamiliesBuilder_ == null) { - columnFamilies_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); + columnFamilies_ = null; } else { columnFamiliesBuilder_.clear(); } @@ -3285,10 +3378,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyColumnRequest_descriptor; @@ -3339,6 +3428,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + 
public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequest)other); @@ -3362,25 +3477,22 @@ public final class MasterProtos { if (other.hasNonce()) { setNonce(other.getNonce()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasTableName()) { - return false; } if (!hasColumnFamilies()) { - return false; } if (!getTableName().isInitialized()) { - return false; } if (!getColumnFamilies().isInitialized()) { - return false; } return true; @@ -3395,7 +3507,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -3405,9 +3517,8 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.TableName table_name = 1; - private 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * required .hbase.pb.TableName table_name = 1; @@ -3420,7 +3531,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } @@ -3461,6 +3572,7 @@ public final class MasterProtos { public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); @@ -3479,7 +3591,7 @@ public final class MasterProtos { */ public Builder clearTableName() { if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); @@ -3502,19 +3614,20 @@ public final class MasterProtos { if (tableNameBuilder_ != 
null) { return tableNameBuilder_.getMessageOrBuilder(); } else { - return tableName_; + return tableName_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * required .hbase.pb.TableName table_name = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { - tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( - tableName_, + getTableName(), getParentForChildren(), isClean()); tableName_ = null; @@ -3522,9 +3635,8 @@ public final class MasterProtos { return tableNameBuilder_; } - // required .hbase.pb.ColumnFamilySchema column_families = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> 
columnFamiliesBuilder_; /** * required .hbase.pb.ColumnFamilySchema column_families = 2; @@ -3537,7 +3649,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() { if (columnFamiliesBuilder_ == null) { - return columnFamilies_; + return columnFamilies_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance() : columnFamilies_; } else { return columnFamiliesBuilder_.getMessage(); } @@ -3578,6 +3690,7 @@ public final class MasterProtos { public Builder mergeColumnFamilies(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { if (columnFamiliesBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + columnFamilies_ != null && columnFamilies_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()) { columnFamilies_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(columnFamilies_).mergeFrom(value).buildPartial(); @@ -3596,7 +3709,7 @@ public final class MasterProtos { */ public Builder clearColumnFamilies() { if (columnFamiliesBuilder_ == null) { - columnFamilies_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); + columnFamilies_ = null; onChanged(); } else { columnFamiliesBuilder_.clear(); @@ -3619,19 +3732,20 @@ public final class MasterProtos { if (columnFamiliesBuilder_ != null) { return columnFamiliesBuilder_.getMessageOrBuilder(); } else { - return columnFamilies_; + return columnFamilies_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance() : columnFamilies_; } } /** * required .hbase.pb.ColumnFamilySchema column_families = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> getColumnFamiliesFieldBuilder() { if (columnFamiliesBuilder_ == null) { - columnFamiliesBuilder_ = new com.google.protobuf.SingleFieldBuilder< + columnFamiliesBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>( - columnFamilies_, + getColumnFamilies(), getParentForChildren(), isClean()); columnFamilies_ = null; @@ -3639,7 +3753,6 @@ public final class MasterProtos { return columnFamiliesBuilder_; } - // optional uint64 nonce_group = 3 [default = 0]; private long nonceGroup_ ; /** * optional uint64 nonce_group = 3 [default = 0]; @@ -3672,7 +3785,6 @@ public final class MasterProtos { return this; } - // optional uint64 nonce = 4 [default = 0]; private long nonce_ ; /** * optional uint64 nonce = 4 [default = 0]; @@ -3704,22 +3816,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // 
@@protoc_insertion_point(builder_scope:hbase.pb.ModifyColumnRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.ModifyColumnRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequest DEFAULT_INSTANCE; static { - defaultInstance = new ModifyColumnRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ModifyColumnRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ModifyColumnRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ModifyColumnRequest) } - public interface ModifyColumnResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ModifyColumnResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ModifyColumnResponse) + com.google.protobuf.MessageOrBuilder { - // optional uint64 proc_id = 1; /** * optional uint64 proc_id = 1; */ @@ -3732,36 +3881,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.ModifyColumnResponse} */ - public static final class ModifyColumnResponse extends - 
com.google.protobuf.GeneratedMessage - implements ModifyColumnResponseOrBuilder { + public static final class ModifyColumnResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ModifyColumnResponse) + ModifyColumnResponseOrBuilder { // Use ModifyColumnResponse.newBuilder() to construct. - private ModifyColumnResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private ModifyColumnResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private ModifyColumnResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ModifyColumnResponse defaultInstance; - public static ModifyColumnResponse getDefaultInstance() { - return defaultInstance; - } - - public ModifyColumnResponse getDefaultInstanceForType() { - return defaultInstance; + private ModifyColumnResponse() { + procId_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ModifyColumnResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -3791,7 +3932,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -3802,30 +3943,14 @@ public final class 
MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyColumnResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyColumnResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ModifyColumnResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ModifyColumnResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional uint64 proc_id = 1; public static final int PROC_ID_FIELD_NUMBER = 1; private long procId_; /** @@ -3841,13 +3966,11 @@ public final class MasterProtos { return procId_; } - private void initFields() { - procId_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -3855,16 +3978,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, procId_); } - 
getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -3872,19 +3993,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(1, procId_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -3900,12 +4015,10 @@ public final class MasterProtos { result = result && (getProcId() == other.getProcId()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -3915,9 +4028,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasProcId()) { hash = (37 * hash) + PROC_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getProcId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getProcId()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -3945,46 +4059,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + 
public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -3992,14 +4117,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.ModifyColumnResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ModifyColumnResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyColumnResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyColumnResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -4012,18 +4138,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + 
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); procId_ = 0L; @@ -4031,10 +4154,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyColumnResponse_descriptor; @@ -4065,6 +4184,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse)other); @@ 
-4079,7 +4224,8 @@ public final class MasterProtos { if (other.hasProcId()) { setProcId(other.getProcId()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -4096,7 +4242,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -4106,7 +4252,6 @@ public final class MasterProtos { } private int bitField0_; - // optional uint64 proc_id = 1; private long procId_ ; /** * optional uint64 proc_id = 1; @@ -4138,22 +4283,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ModifyColumnResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.ModifyColumnResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse DEFAULT_INSTANCE; static { - defaultInstance = new ModifyColumnResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new 
com.google.protobuf.AbstractParser() { + public ModifyColumnResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ModifyColumnResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ModifyColumnResponse) } - public interface MoveRegionRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface MoveRegionRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.MoveRegionRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionSpecifier region = 1; /** * required .hbase.pb.RegionSpecifier region = 1; */ @@ -4167,7 +4349,6 @@ public final class MasterProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - // optional .hbase.pb.ServerName dest_server_name = 2; /** * optional .hbase.pb.ServerName dest_server_name = 2; */ @@ -4184,36 +4365,27 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.MoveRegionRequest} */ - public static final class MoveRegionRequest extends - com.google.protobuf.GeneratedMessage - implements MoveRegionRequestOrBuilder { + public static final class MoveRegionRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.MoveRegionRequest) + MoveRegionRequestOrBuilder { // Use MoveRegionRequest.newBuilder() to construct. 
- private MoveRegionRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private MoveRegionRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private MoveRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final MoveRegionRequest defaultInstance; - public static MoveRegionRequest getDefaultInstance() { - return defaultInstance; + private MoveRegionRequest() { } - public MoveRegionRequest getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private MoveRegionRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -4264,7 +4436,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -4275,30 +4447,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_MoveRegionRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_MoveRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public MoveRegionRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new MoveRegionRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** @@ -4311,16 +4467,15 @@ public final class MasterProtos { * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } /** * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } - // optional .hbase.pb.ServerName dest_server_name = 2; public static final int DEST_SERVER_NAME_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName destServerName_; /** @@ -4333,23 +4488,20 @@ public final class MasterProtos { * optional .hbase.pb.ServerName dest_server_name = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getDestServerName() { - return destServerName_; + return destServerName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : destServerName_; } /** * optional .hbase.pb.ServerName dest_server_name = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestServerNameOrBuilder() { - return destServerName_; + return destServerName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : destServerName_; } - private void initFields() { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - destServerName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRegion()) { memoizedIsInitialized = 0; @@ -4371,43 +4523,35 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); + output.writeMessage(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - 
output.writeMessage(2, destServerName_); + output.writeMessage(2, getDestServerName()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); + .computeMessageSize(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, destServerName_); + .computeMessageSize(2, getDestServerName()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -4428,12 +4572,10 @@ public final class MasterProtos { result = result && getDestServerName() .equals(other.getDestServerName()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -4449,7 +4591,7 @@ public final class MasterProtos { hash = (37 * hash) + DEST_SERVER_NAME_FIELD_NUMBER; hash = (53 * hash) + getDestServerName().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -4477,46 +4619,57 @@ public final class MasterProtos { } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -4524,14 +4677,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.MoveRegionRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.MoveRegionRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_MoveRegionRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_MoveRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -4544,30 +4698,27 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionFieldBuilder(); getDestServerNameFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (destServerNameBuilder_ == null) { - destServerName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + destServerName_ = null; } else { destServerNameBuilder_.clear(); } @@ -4575,10 +4726,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_MoveRegionRequest_descriptor; @@ -4621,6 +4768,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) 
super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequest)other); @@ -4638,22 +4811,20 @@ public final class MasterProtos { if (other.hasDestServerName()) { mergeDestServerName(other.getDestServerName()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasRegion()) { - return false; } if (!getRegion().isInitialized()) { - return false; } if (hasDestServerName()) { if (!getDestServerName().isInitialized()) { - return false; } } @@ -4669,7 +4840,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -4679,9 +4850,8 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * required .hbase.pb.RegionSpecifier region = 1; @@ -4694,7 +4864,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } @@ -4735,6 +4905,7 @@ public final class MasterProtos { public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != null && region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); @@ -4753,7 +4924,7 @@ public final class MasterProtos { */ public Builder clearRegion() { if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; onChanged(); } else { regionBuilder_.clear(); @@ -4776,19 +4947,20 @@ public final class MasterProtos { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { - return region_; 
+ return region_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } } /** * required .hbase.pb.RegionSpecifier region = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, + getRegion(), getParentForChildren(), isClean()); region_ = null; @@ -4796,9 +4968,8 @@ public final class MasterProtos { return regionBuilder_; } - // optional .hbase.pb.ServerName dest_server_name = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName destServerName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName destServerName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> destServerNameBuilder_; /** * optional .hbase.pb.ServerName dest_server_name = 2; @@ -4811,7 +4982,7 @@ public final class MasterProtos { */ 
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getDestServerName() { if (destServerNameBuilder_ == null) { - return destServerName_; + return destServerName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : destServerName_; } else { return destServerNameBuilder_.getMessage(); } @@ -4852,6 +5023,7 @@ public final class MasterProtos { public Builder mergeDestServerName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (destServerNameBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + destServerName_ != null && destServerName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { destServerName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.newBuilder(destServerName_).mergeFrom(value).buildPartial(); @@ -4870,7 +5042,7 @@ public final class MasterProtos { */ public Builder clearDestServerName() { if (destServerNameBuilder_ == null) { - destServerName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + destServerName_ = null; onChanged(); } else { destServerNameBuilder_.clear(); @@ -4893,73 +5065,103 @@ public final class MasterProtos { if (destServerNameBuilder_ != null) { return destServerNameBuilder_.getMessageOrBuilder(); } else { - return destServerName_; + return destServerName_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : destServerName_; } } /** * optional .hbase.pb.ServerName dest_server_name = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getDestServerNameFieldBuilder() { if (destServerNameBuilder_ == null) { - destServerNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + destServerNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( - destServerName_, + getDestServerName(), getParentForChildren(), isClean()); destServerName_ = null; } return destServerNameBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.MoveRegionRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.MoveRegionRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequest DEFAULT_INSTANCE; static { - defaultInstance = new MoveRegionRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequest 
getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public MoveRegionRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MoveRegionRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.MoveRegionRequest) } - public interface MoveRegionResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface MoveRegionResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.MoveRegionResponse) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.MoveRegionResponse} */ - public static final class MoveRegionResponse extends - com.google.protobuf.GeneratedMessage - implements MoveRegionResponseOrBuilder { + public static final class MoveRegionResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.MoveRegionResponse) + MoveRegionResponseOrBuilder { // Use MoveRegionResponse.newBuilder() to construct. 
- private MoveRegionResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private MoveRegionResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private MoveRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final MoveRegionResponse defaultInstance; - public static MoveRegionResponse getDefaultInstance() { - return defaultInstance; - } - - public MoveRegionResponse getDefaultInstanceForType() { - return defaultInstance; + private MoveRegionResponse() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private MoveRegionResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -4983,7 +5185,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -4994,34 +5196,18 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_MoveRegionResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_MoveRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public MoveRegionResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new MoveRegionResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -5029,29 +5215,21 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean 
equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -5062,12 +5240,10 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -5075,7 +5251,7 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -5103,46 +5279,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -5150,14 +5337,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.MoveRegionResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.MoveRegionResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_MoveRegionResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_MoveRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -5170,27 +5358,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); 
return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_MoveRegionResponse_descriptor; @@ -5214,6 +5395,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse)other); @@ -5225,7 +5432,8 @@ public final class MasterProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -5242,7 +5450,7 @@ public final class MasterProtos { parsedMessage 
= PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -5250,22 +5458,59 @@ public final class MasterProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.MoveRegionResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.MoveRegionResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse DEFAULT_INSTANCE; static { - defaultInstance = new MoveRegionResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public MoveRegionResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MoveRegionResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.MoveRegionResponse) } - public interface DispatchMergingRegionsRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface DispatchMergingRegionsRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.DispatchMergingRegionsRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionSpecifier region_a = 1; /** * required .hbase.pb.RegionSpecifier region_a = 1; */ @@ -5279,7 +5524,6 @@ public final class MasterProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionAOrBuilder(); - // required .hbase.pb.RegionSpecifier region_b = 2; /** * required .hbase.pb.RegionSpecifier region_b = 2; */ @@ -5293,7 +5537,6 @@ public final class MasterProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionBOrBuilder(); - // optional bool forcible = 3 [default = false]; /** * optional bool forcible = 3 [default = false]; */ @@ -5303,7 +5546,6 @@ public final class MasterProtos { */ boolean getForcible(); - // optional uint64 nonce_group = 4 [default = 0]; /** * optional uint64 nonce_group = 4 [default = 0]; */ @@ -5313,7 +5555,6 @@ public final class MasterProtos { */ long getNonceGroup(); - // optional uint64 nonce = 5 [default = 0]; /** * optional uint64 nonce = 5 [default = 0]; */ @@ -5324,43 +5565,37 @@ public final class MasterProtos { long getNonce(); } /** - * Protobuf type {@code hbase.pb.DispatchMergingRegionsRequest} - * *
    **
    * Dispatch merging the specified regions.
    * 
+ * + * Protobuf type {@code hbase.pb.DispatchMergingRegionsRequest} */ - public static final class DispatchMergingRegionsRequest extends - com.google.protobuf.GeneratedMessage - implements DispatchMergingRegionsRequestOrBuilder { + public static final class DispatchMergingRegionsRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.DispatchMergingRegionsRequest) + DispatchMergingRegionsRequestOrBuilder { // Use DispatchMergingRegionsRequest.newBuilder() to construct. - private DispatchMergingRegionsRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private DispatchMergingRegionsRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private DispatchMergingRegionsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final DispatchMergingRegionsRequest defaultInstance; - public static DispatchMergingRegionsRequest getDefaultInstance() { - return defaultInstance; } - - public DispatchMergingRegionsRequest getDefaultInstanceForType() { - return defaultInstance; + private DispatchMergingRegionsRequest() { + forcible_ = false; + nonceGroup_ = 0L; + nonce_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private DispatchMergingRegionsRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -5426,7 +5661,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch 
(java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -5437,30 +5672,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DispatchMergingRegionsRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DispatchMergingRegionsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public DispatchMergingRegionsRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new DispatchMergingRegionsRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.RegionSpecifier region_a = 1; public static final int REGION_A_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier regionA_; /** @@ -5473,16 +5692,15 @@ public final class MasterProtos { * required .hbase.pb.RegionSpecifier region_a = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegionA() { - return regionA_; + return regionA_ == 
null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : regionA_; } /** * required .hbase.pb.RegionSpecifier region_a = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionAOrBuilder() { - return regionA_; + return regionA_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : regionA_; } - // required .hbase.pb.RegionSpecifier region_b = 2; public static final int REGION_B_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier regionB_; /** @@ -5495,16 +5713,15 @@ public final class MasterProtos { * required .hbase.pb.RegionSpecifier region_b = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegionB() { - return regionB_; + return regionB_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : regionB_; } /** * required .hbase.pb.RegionSpecifier region_b = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionBOrBuilder() { - return regionB_; + return regionB_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : regionB_; } - // optional bool forcible = 3 [default = false]; public static final int FORCIBLE_FIELD_NUMBER = 3; private boolean forcible_; /** @@ -5520,7 +5737,6 @@ public final class MasterProtos { return forcible_; } - // optional uint64 nonce_group = 4 [default = 0]; public static final int NONCE_GROUP_FIELD_NUMBER = 4; private long nonceGroup_; /** @@ -5536,7 +5752,6 @@ public final class MasterProtos { return nonceGroup_; } - // optional uint64 nonce = 5 [default = 0]; public static final int NONCE_FIELD_NUMBER = 5; private long nonce_; /** @@ -5552,17 +5767,11 @@ public final class MasterProtos { return nonce_; } - private void initFields() { - regionA_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - regionB_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - forcible_ = false; - nonceGroup_ = 0L; - nonce_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRegionA()) { memoizedIsInitialized = 0; @@ -5586,12 +5795,11 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, regionA_); + output.writeMessage(1, getRegionA()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, regionB_); + output.writeMessage(2, getRegionB()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBool(3, forcible_); @@ -5602,22 +5810,21 @@ public final class MasterProtos { if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeUInt64(5, nonce_); 
} - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, regionA_); + .computeMessageSize(1, getRegionA()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, regionB_); + .computeMessageSize(2, getRegionB()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream @@ -5631,19 +5838,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(5, nonce_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -5679,12 +5880,10 @@ public final class MasterProtos { result = result && (getNonce() == other.getNonce()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -5702,17 +5901,20 @@ public final class MasterProtos { } if (hasForcible()) { hash = (37 * hash) + FORCIBLE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getForcible()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getForcible()); } if (hasNonceGroup()) { hash = (37 * hash) + 
NONCE_GROUP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonceGroup()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonceGroup()); } if (hasNonce()) { hash = (37 * hash) + NONCE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonce()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonce()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -5740,66 +5942,78 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public 
static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.DispatchMergingRegionsRequest} - * *
      **
      * Dispatch merging the specified regions.
      * 
+ * + * Protobuf type {@code hbase.pb.DispatchMergingRegionsRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.DispatchMergingRegionsRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DispatchMergingRegionsRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DispatchMergingRegionsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -5812,30 +6026,27 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionAFieldBuilder(); getRegionBFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (regionABuilder_ == null) { - regionA_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + regionA_ = null; } else { regionABuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (regionBBuilder_ == null) { - regionB_ = 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + regionB_ = null; } else { regionBBuilder_.clear(); } @@ -5849,10 +6060,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DispatchMergingRegionsRequest_descriptor; @@ -5907,6 +6114,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest)other); @@ -5933,25 +6166,22 @@ public final class MasterProtos { if (other.hasNonce()) { setNonce(other.getNonce()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final 
boolean isInitialized() { if (!hasRegionA()) { - return false; } if (!hasRegionB()) { - return false; } if (!getRegionA().isInitialized()) { - return false; } if (!getRegionB().isInitialized()) { - return false; } return true; @@ -5966,7 +6196,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -5976,9 +6206,8 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.RegionSpecifier region_a = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier regionA_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier regionA_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionABuilder_; /** * required .hbase.pb.RegionSpecifier region_a = 1; @@ -5991,7 +6220,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegionA() { if (regionABuilder_ == null) { - return regionA_; + return regionA_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : regionA_; } else { return regionABuilder_.getMessage(); } @@ -6032,6 +6261,7 @@ public final class MasterProtos { public Builder mergeRegionA(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionABuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + regionA_ != null && regionA_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { regionA_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(regionA_).mergeFrom(value).buildPartial(); @@ -6050,7 +6280,7 @@ public final class MasterProtos { */ public Builder clearRegionA() { if (regionABuilder_ == null) { - regionA_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + regionA_ = null; onChanged(); } else { regionABuilder_.clear(); @@ -6073,19 +6303,20 @@ public final class MasterProtos { if (regionABuilder_ != null) { return regionABuilder_.getMessageOrBuilder(); } else { - return regionA_; + return regionA_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : regionA_; } } /** * required .hbase.pb.RegionSpecifier region_a = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionAFieldBuilder() { if (regionABuilder_ == null) { - regionABuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionABuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - regionA_, + getRegionA(), getParentForChildren(), isClean()); regionA_ = null; @@ -6093,9 +6324,8 @@ public final class MasterProtos { return regionABuilder_; } - // required .hbase.pb.RegionSpecifier region_b = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier regionB_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier regionB_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBBuilder_; /** * required .hbase.pb.RegionSpecifier region_b = 2; @@ -6108,7 +6338,7 @@ public final class MasterProtos { */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegionB() { if (regionBBuilder_ == null) { - return regionB_; + return regionB_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : regionB_; } else { return regionBBuilder_.getMessage(); } @@ -6149,6 +6379,7 @@ public final class MasterProtos { public Builder mergeRegionB(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + regionB_ != null && regionB_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { regionB_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(regionB_).mergeFrom(value).buildPartial(); @@ -6167,7 +6398,7 @@ public final class MasterProtos { */ public Builder clearRegionB() { if (regionBBuilder_ == null) { - regionB_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + regionB_ = null; onChanged(); } else { regionBBuilder_.clear(); @@ -6190,19 +6421,20 @@ public final class MasterProtos { if (regionBBuilder_ != null) { return regionBBuilder_.getMessageOrBuilder(); } else { - return regionB_; + return regionB_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : regionB_; } } /** * required .hbase.pb.RegionSpecifier region_b = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionBFieldBuilder() { if (regionBBuilder_ == null) { - regionBBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionBBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - regionB_, + getRegionB(), getParentForChildren(), isClean()); regionB_ = null; @@ -6210,7 +6442,6 @@ public final class MasterProtos { return regionBBuilder_; } - // optional bool forcible = 3 [default = false]; private boolean forcible_ ; /** * optional bool forcible = 3 [default = false]; @@ -6243,7 +6474,6 @@ public final class MasterProtos { return this; } - // optional uint64 nonce_group = 4 [default = 0]; private long nonceGroup_ ; /** * optional uint64 nonce_group = 4 [default = 0]; @@ -6276,7 +6506,6 @@ public final class MasterProtos { return this; } - // optional uint64 nonce = 5 [default = 0]; private long nonce_ ; /** * optional uint64 nonce = 5 [default = 0]; @@ -6308,22 +6537,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet 
unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.DispatchMergingRegionsRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.DispatchMergingRegionsRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest DEFAULT_INSTANCE; static { - defaultInstance = new DispatchMergingRegionsRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public DispatchMergingRegionsRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DispatchMergingRegionsRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.DispatchMergingRegionsRequest) } - public interface DispatchMergingRegionsResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface DispatchMergingRegionsResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.DispatchMergingRegionsResponse) + com.google.protobuf.MessageOrBuilder { - // optional uint64 proc_id = 1; /** * optional uint64 proc_id = 1; 
*/ @@ -6336,36 +6602,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.DispatchMergingRegionsResponse} */ - public static final class DispatchMergingRegionsResponse extends - com.google.protobuf.GeneratedMessage - implements DispatchMergingRegionsResponseOrBuilder { + public static final class DispatchMergingRegionsResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.DispatchMergingRegionsResponse) + DispatchMergingRegionsResponseOrBuilder { // Use DispatchMergingRegionsResponse.newBuilder() to construct. - private DispatchMergingRegionsResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private DispatchMergingRegionsResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private DispatchMergingRegionsResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final DispatchMergingRegionsResponse defaultInstance; - public static DispatchMergingRegionsResponse getDefaultInstance() { - return defaultInstance; } - - public DispatchMergingRegionsResponse getDefaultInstanceForType() { - return defaultInstance; + private DispatchMergingRegionsResponse() { + procId_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private DispatchMergingRegionsResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -6395,7 +6653,7 @@ public final class MasterProtos { throw 
e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -6406,30 +6664,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DispatchMergingRegionsResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DispatchMergingRegionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public DispatchMergingRegionsResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new DispatchMergingRegionsResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional uint64 proc_id = 1; public static final int PROC_ID_FIELD_NUMBER = 1; private long procId_; /** @@ -6445,13 +6687,11 @@ public final class MasterProtos { return procId_; } - private void initFields() { - procId_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + 
if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -6459,16 +6699,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, procId_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -6476,19 +6714,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(1, procId_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -6504,12 +6736,10 @@ public final class MasterProtos { result = result && (getProcId() == other.getProcId()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -6519,9 +6749,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasProcId()) { hash = (37 * hash) + PROC_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getProcId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getProcId()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + 
unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -6549,46 +6780,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse 
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -6596,14 +6838,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.DispatchMergingRegionsResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.DispatchMergingRegionsResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DispatchMergingRegionsResponse_descriptor; } - 
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DispatchMergingRegionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -6616,18 +6859,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); procId_ = 0L; @@ -6635,10 +6875,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DispatchMergingRegionsResponse_descriptor; @@ -6669,6 +6905,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) 
super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse)other); @@ -6683,7 +6945,8 @@ public final class MasterProtos { if (other.hasProcId()) { setProcId(other.getProcId()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -6700,7 +6963,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -6710,7 +6973,6 @@ public final class MasterProtos { } private int bitField0_; - // optional uint64 proc_id = 1; private long procId_ ; /** * optional uint64 proc_id = 1; @@ -6742,22 +7004,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.DispatchMergingRegionsResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.DispatchMergingRegionsResponse) + private static final 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse DEFAULT_INSTANCE; static { - defaultInstance = new DispatchMergingRegionsResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public DispatchMergingRegionsResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DispatchMergingRegionsResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.DispatchMergingRegionsResponse) } - public interface AssignRegionRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface AssignRegionRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.AssignRegionRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionSpecifier region = 1; /** * required .hbase.pb.RegionSpecifier region = 1; */ @@ -6774,36 +7073,27 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.AssignRegionRequest} */ - public static final class AssignRegionRequest extends - com.google.protobuf.GeneratedMessage - implements 
AssignRegionRequestOrBuilder { + public static final class AssignRegionRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.AssignRegionRequest) + AssignRegionRequestOrBuilder { // Use AssignRegionRequest.newBuilder() to construct. - private AssignRegionRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private AssignRegionRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private AssignRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final AssignRegionRequest defaultInstance; - public static AssignRegionRequest getDefaultInstance() { - return defaultInstance; - } - - public AssignRegionRequest getDefaultInstanceForType() { - return defaultInstance; + private AssignRegionRequest() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private AssignRegionRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -6841,7 +7131,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -6852,30 +7142,14 @@ public final class MasterProtos { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AssignRegionRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AssignRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public AssignRegionRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new AssignRegionRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** @@ -6888,22 +7162,20 @@ public final class MasterProtos { * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } /** * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } - private void initFields() { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRegion()) { memoizedIsInitialized = 0; @@ -6919,36 +7191,28 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); + output.writeMessage(1, getRegion()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); + .computeMessageSize(1, getRegion()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -6964,12 +7228,10 @@ public final class MasterProtos { result = result && getRegion() .equals(other.getRegion()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && 
unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -6981,7 +7243,7 @@ public final class MasterProtos { hash = (37 * hash) + REGION_FIELD_NUMBER; hash = (53 * hash) + getRegion().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -7009,46 +7271,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -7056,14 +7329,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.AssignRegionRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.AssignRegionRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AssignRegionRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AssignRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -7076,23 +7350,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public 
Builder clear() { super.clear(); if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; } else { regionBuilder_.clear(); } @@ -7100,10 +7371,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AssignRegionRequest_descriptor; @@ -7138,6 +7405,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest)other); @@ -7152,17 +7445,16 @@ public final class MasterProtos { if (other.hasRegion()) { mergeRegion(other.getRegion()); } - this.mergeUnknownFields(other.getUnknownFields()); + 
this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasRegion()) { - return false; } if (!getRegion().isInitialized()) { - return false; } return true; @@ -7177,7 +7469,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -7187,9 +7479,8 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * required .hbase.pb.RegionSpecifier region = 1; @@ -7202,7 +7493,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } @@ -7243,6 +7534,7 @@ public final class MasterProtos { public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != null && region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); @@ -7261,7 +7553,7 @@ public final class MasterProtos { */ public Builder clearRegion() { if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; onChanged(); } else { regionBuilder_.clear(); @@ -7284,73 +7576,103 @@ public final class MasterProtos { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { - return region_; + return region_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } } /** * required .hbase.pb.RegionSpecifier region = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, + getRegion(), getParentForChildren(), isClean()); region_ = null; } return regionBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.AssignRegionRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.AssignRegionRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest DEFAULT_INSTANCE; static { - defaultInstance = new AssignRegionRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest getDefaultInstance() { + return 
DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public AssignRegionRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new AssignRegionRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.AssignRegionRequest) } - public interface AssignRegionResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface AssignRegionResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.AssignRegionResponse) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.AssignRegionResponse} */ - public static final class AssignRegionResponse extends - com.google.protobuf.GeneratedMessage - implements AssignRegionResponseOrBuilder { + public static final class AssignRegionResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.AssignRegionResponse) + AssignRegionResponseOrBuilder { // Use AssignRegionResponse.newBuilder() to construct. 
- private AssignRegionResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private AssignRegionResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private AssignRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final AssignRegionResponse defaultInstance; - public static AssignRegionResponse getDefaultInstance() { - return defaultInstance; } - - public AssignRegionResponse getDefaultInstanceForType() { - return defaultInstance; + private AssignRegionResponse() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private AssignRegionResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -7374,7 +7696,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -7385,34 +7707,18 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AssignRegionResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AssignRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public AssignRegionResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new AssignRegionResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -7420,29 +7726,21 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean 
equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -7453,12 +7751,10 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -7466,7 +7762,7 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -7494,46 +7790,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -7541,14 +7848,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.AssignRegionResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.AssignRegionResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AssignRegionResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AssignRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -7561,27 +7869,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { 
super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AssignRegionResponse_descriptor; @@ -7605,6 +7906,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse)other); @@ -7616,7 +7943,8 @@ public final class MasterProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -7633,7 +7961,7 @@ public final class 
MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -7641,22 +7969,59 @@ public final class MasterProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.AssignRegionResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.AssignRegionResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse DEFAULT_INSTANCE; static { - defaultInstance = new AssignRegionResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public AssignRegionResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new AssignRegionResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return 
PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.AssignRegionResponse) } - public interface UnassignRegionRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface UnassignRegionRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.UnassignRegionRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionSpecifier region = 1; /** * required .hbase.pb.RegionSpecifier region = 1; */ @@ -7670,7 +8035,6 @@ public final class MasterProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - // optional bool force = 2 [default = false]; /** * optional bool force = 2 [default = false]; */ @@ -7683,36 +8047,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.UnassignRegionRequest} */ - public static final class UnassignRegionRequest extends - com.google.protobuf.GeneratedMessage - implements UnassignRegionRequestOrBuilder { + public static final class UnassignRegionRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.UnassignRegionRequest) + UnassignRegionRequestOrBuilder { // Use UnassignRegionRequest.newBuilder() to construct. 
- private UnassignRegionRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private UnassignRegionRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private UnassignRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final UnassignRegionRequest defaultInstance; - public static UnassignRegionRequest getDefaultInstance() { - return defaultInstance; - } - - public UnassignRegionRequest getDefaultInstanceForType() { - return defaultInstance; + private UnassignRegionRequest() { + force_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private UnassignRegionRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -7755,7 +8111,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -7766,30 +8122,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_UnassignRegionRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_UnassignRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public UnassignRegionRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new UnassignRegionRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** @@ -7802,16 +8142,15 @@ public final class MasterProtos { * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } /** * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } - // optional bool force = 2 [default = false]; public static final int FORCE_FIELD_NUMBER = 2; private boolean force_; /** @@ -7827,14 +8166,11 @@ public final class MasterProtos { return force_; } - private void initFields() { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - force_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRegion()) { memoizedIsInitialized = 0; @@ -7850,43 +8186,35 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); + output.writeMessage(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBool(2, force_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); + .computeMessageSize(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(2, force_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws 
java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -7907,12 +8235,10 @@ public final class MasterProtos { result = result && (getForce() == other.getForce()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -7926,9 +8252,10 @@ public final class MasterProtos { } if (hasForce()) { hash = (37 * hash) + FORCE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getForce()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getForce()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -7956,46 +8283,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } 
public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -8003,14 +8341,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.UnassignRegionRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.UnassignRegionRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_UnassignRegionRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_UnassignRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -8023,23 +8362,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - 
public Builder clear() { super.clear(); if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; } else { regionBuilder_.clear(); } @@ -8049,10 +8385,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_UnassignRegionRequest_descriptor; @@ -8091,6 +8423,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest)other); @@ -8108,17 +8466,16 @@ public final class MasterProtos { if (other.hasForce()) { setForce(other.getForce()); } - this.mergeUnknownFields(other.getUnknownFields()); + 
this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasRegion()) { - return false; } if (!getRegion().isInitialized()) { - return false; } return true; @@ -8133,7 +8490,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -8143,9 +8500,8 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * required .hbase.pb.RegionSpecifier region = 1; @@ -8158,7 +8514,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } @@ -8199,6 +8555,7 @@ public final class MasterProtos { public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != null && region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); @@ -8217,7 +8574,7 @@ public final class MasterProtos { */ public Builder clearRegion() { if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; onChanged(); } else { regionBuilder_.clear(); @@ -8240,19 +8597,20 @@ public final class MasterProtos { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { - return region_; + return region_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } } /** * required .hbase.pb.RegionSpecifier region = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, + getRegion(), getParentForChildren(), isClean()); region_ = null; @@ -8260,7 +8618,6 @@ public final class MasterProtos { return regionBuilder_; } - // optional bool force = 2 [default = false]; private boolean force_ ; /** * optional bool force = 2 [default = false]; @@ -8292,54 +8649,83 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.UnassignRegionRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.UnassignRegionRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest DEFAULT_INSTANCE; static { - defaultInstance = new UnassignRegionRequest(true); - 
defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public UnassignRegionRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new UnassignRegionRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.UnassignRegionRequest) } - public interface UnassignRegionResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface UnassignRegionResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.UnassignRegionResponse) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.UnassignRegionResponse} */ - public static final class UnassignRegionResponse extends - com.google.protobuf.GeneratedMessage - implements UnassignRegionResponseOrBuilder { + public static final class UnassignRegionResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.UnassignRegionResponse) + UnassignRegionResponseOrBuilder { // Use UnassignRegionResponse.newBuilder() to construct. 
- private UnassignRegionResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private UnassignRegionResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private UnassignRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final UnassignRegionResponse defaultInstance; - public static UnassignRegionResponse getDefaultInstance() { - return defaultInstance; } - - public UnassignRegionResponse getDefaultInstanceForType() { - return defaultInstance; + private UnassignRegionResponse() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private UnassignRegionResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -8363,7 +8749,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -8374,34 +8760,18 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_UnassignRegionResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_UnassignRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public UnassignRegionResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new UnassignRegionResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -8409,29 +8779,21 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public 
boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -8442,12 +8804,10 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -8455,7 +8815,7 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -8483,46 +8843,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -8530,14 +8901,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.UnassignRegionResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.UnassignRegionResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_UnassignRegionResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_UnassignRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -8550,27 +8922,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder 
clear() { super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_UnassignRegionResponse_descriptor; @@ -8594,6 +8959,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse)other); @@ -8605,7 +8996,8 @@ public final class MasterProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -8622,7 +9014,7 @@ 
public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -8630,22 +9022,59 @@ public final class MasterProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.UnassignRegionResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.UnassignRegionResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse DEFAULT_INSTANCE; static { - defaultInstance = new UnassignRegionResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public UnassignRegionResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new UnassignRegionResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public 
com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.UnassignRegionResponse) } - public interface OfflineRegionRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface OfflineRegionRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.OfflineRegionRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionSpecifier region = 1; /** * required .hbase.pb.RegionSpecifier region = 1; */ @@ -8662,36 +9091,27 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.OfflineRegionRequest} */ - public static final class OfflineRegionRequest extends - com.google.protobuf.GeneratedMessage - implements OfflineRegionRequestOrBuilder { + public static final class OfflineRegionRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.OfflineRegionRequest) + OfflineRegionRequestOrBuilder { // Use OfflineRegionRequest.newBuilder() to construct. 
- private OfflineRegionRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private OfflineRegionRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private OfflineRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final OfflineRegionRequest defaultInstance; - public static OfflineRegionRequest getDefaultInstance() { - return defaultInstance; } - - public OfflineRegionRequest getDefaultInstanceForType() { - return defaultInstance; + private OfflineRegionRequest() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private OfflineRegionRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -8729,7 +9149,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -8740,30 +9160,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_OfflineRegionRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_OfflineRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public OfflineRegionRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new OfflineRegionRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** @@ -8776,22 +9180,20 @@ public final class MasterProtos { * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } /** * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } - private void initFields() { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRegion()) { memoizedIsInitialized = 0; @@ -8807,36 +9209,28 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); + output.writeMessage(1, getRegion()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); + .computeMessageSize(1, getRegion()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -8852,12 +9246,10 @@ public final class MasterProtos { result = result && getRegion() .equals(other.getRegion()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && 
unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -8869,7 +9261,7 @@ public final class MasterProtos { hash = (37 * hash) + REGION_FIELD_NUMBER; hash = (53 * hash) + getRegion().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -8897,46 +9289,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -8944,14 +9347,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.OfflineRegionRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.OfflineRegionRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_OfflineRegionRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_OfflineRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -8964,23 +9368,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - 
public Builder clear() { super.clear(); if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; } else { regionBuilder_.clear(); } @@ -8988,10 +9389,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_OfflineRegionRequest_descriptor; @@ -9026,6 +9423,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequest)other); @@ -9040,17 +9463,16 @@ public final class MasterProtos { if (other.hasRegion()) { mergeRegion(other.getRegion()); } - this.mergeUnknownFields(other.getUnknownFields()); + 
this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasRegion()) { - return false; } if (!getRegion().isInitialized()) { - return false; } return true; @@ -9065,7 +9487,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -9075,9 +9497,8 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * required .hbase.pb.RegionSpecifier region = 1; @@ -9090,7 +9511,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } @@ -9131,6 +9552,7 @@ public final class MasterProtos { public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != null && region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); @@ -9149,7 +9571,7 @@ public final class MasterProtos { */ public Builder clearRegion() { if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; onChanged(); } else { regionBuilder_.clear(); @@ -9172,73 +9594,103 @@ public final class MasterProtos { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { - return region_; + return region_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } } /** * required .hbase.pb.RegionSpecifier region = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, + getRegion(), getParentForChildren(), isClean()); region_ = null; } return regionBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.OfflineRegionRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.OfflineRegionRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequest DEFAULT_INSTANCE; static { - defaultInstance = new OfflineRegionRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequest getDefaultInstance() { + 
return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public OfflineRegionRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new OfflineRegionRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.OfflineRegionRequest) } - public interface OfflineRegionResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface OfflineRegionResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.OfflineRegionResponse) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.OfflineRegionResponse} */ - public static final class OfflineRegionResponse extends - com.google.protobuf.GeneratedMessage - implements OfflineRegionResponseOrBuilder { + public static final class OfflineRegionResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.OfflineRegionResponse) + OfflineRegionResponseOrBuilder { // Use OfflineRegionResponse.newBuilder() to construct. 
- private OfflineRegionResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private OfflineRegionResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private OfflineRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final OfflineRegionResponse defaultInstance; - public static OfflineRegionResponse getDefaultInstance() { - return defaultInstance; } - - public OfflineRegionResponse getDefaultInstanceForType() { - return defaultInstance; + private OfflineRegionResponse() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private OfflineRegionResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -9262,7 +9714,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -9273,34 +9725,18 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_OfflineRegionResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_OfflineRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public OfflineRegionResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new OfflineRegionResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -9308,29 +9744,21 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public 
boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -9341,12 +9769,10 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -9354,7 +9780,7 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -9382,46 +9808,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -9429,14 +9866,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.OfflineRegionResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.OfflineRegionResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_OfflineRegionResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_OfflineRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -9449,27 +9887,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { 
super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_OfflineRegionResponse_descriptor; @@ -9493,6 +9924,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse)other); @@ -9504,7 +9961,8 @@ public final class MasterProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -9521,7 +9979,7 @@ public final class 
MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -9529,22 +9987,59 @@ public final class MasterProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.OfflineRegionResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.OfflineRegionResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse DEFAULT_INSTANCE; static { - defaultInstance = new OfflineRegionResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public OfflineRegionResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new OfflineRegionResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + 
return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.OfflineRegionResponse) } - public interface CreateTableRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface CreateTableRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.CreateTableRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.TableSchema table_schema = 1; /** * required .hbase.pb.TableSchema table_schema = 1; */ @@ -9558,7 +10053,6 @@ public final class MasterProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder(); - // repeated bytes split_keys = 2; /** * repeated bytes split_keys = 2; */ @@ -9572,7 +10066,6 @@ public final class MasterProtos { */ com.google.protobuf.ByteString getSplitKeys(int index); - // optional uint64 nonce_group = 3 [default = 0]; /** * optional uint64 nonce_group = 3 [default = 0]; */ @@ -9582,7 +10075,6 @@ public final class MasterProtos { */ long getNonceGroup(); - // optional uint64 nonce = 4 [default = 0]; /** * optional uint64 nonce = 4 [default = 0]; */ @@ -9595,36 +10087,30 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.CreateTableRequest} */ - public static final class CreateTableRequest extends - com.google.protobuf.GeneratedMessage - implements CreateTableRequestOrBuilder { + public static final class CreateTableRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.CreateTableRequest) + CreateTableRequestOrBuilder { // Use CreateTableRequest.newBuilder() to construct. 
- private CreateTableRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private CreateTableRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private CreateTableRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final CreateTableRequest defaultInstance; - public static CreateTableRequest getDefaultInstance() { - return defaultInstance; } - - public CreateTableRequest getDefaultInstanceForType() { - return defaultInstance; + private CreateTableRequest() { + splitKeys_ = java.util.Collections.emptyList(); + nonceGroup_ = 0L; + nonce_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private CreateTableRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -9680,7 +10166,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { splitKeys_ = java.util.Collections.unmodifiableList(splitKeys_); @@ -9694,30 +10180,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_CreateTableRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_CreateTableRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CreateTableRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CreateTableRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.TableSchema table_schema = 1; public static final int TABLE_SCHEMA_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema tableSchema_; /** @@ -9730,16 +10200,15 @@ public final class MasterProtos { * required .hbase.pb.TableSchema table_schema = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { - return tableSchema_; + return tableSchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : tableSchema_; } /** * required .hbase.pb.TableSchema table_schema = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() { - return tableSchema_; + return tableSchema_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : tableSchema_; } - // repeated bytes split_keys = 2; public static final int SPLIT_KEYS_FIELD_NUMBER = 2; private java.util.List splitKeys_; /** @@ -9762,7 +10231,6 @@ public final class MasterProtos { return splitKeys_.get(index); } - // optional uint64 nonce_group = 3 [default = 0]; public static final int NONCE_GROUP_FIELD_NUMBER = 3; private long nonceGroup_; /** @@ -9778,7 +10246,6 @@ public final class MasterProtos { return nonceGroup_; } - // optional uint64 nonce = 4 [default = 0]; public static final int NONCE_FIELD_NUMBER = 4; private long nonce_; /** @@ -9794,16 +10261,11 @@ public final class MasterProtos { return nonce_; } - private void initFields() { - tableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - splitKeys_ = java.util.Collections.emptyList(); - nonceGroup_ = 0L; - nonce_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasTableSchema()) { memoizedIsInitialized = 0; @@ -9819,9 +10281,8 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, tableSchema_); + output.writeMessage(1, getTableSchema()); } for (int i = 0; i < splitKeys_.size(); i++) { output.writeBytes(2, splitKeys_.get(i)); @@ -9832,18 +10293,17 @@ public final class MasterProtos { if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeUInt64(4, nonce_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = 
memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, tableSchema_); + .computeMessageSize(1, getTableSchema()); } { int dataSize = 0; @@ -9862,19 +10322,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(4, nonce_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -9902,12 +10356,10 @@ public final class MasterProtos { result = result && (getNonce() == other.getNonce()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -9925,13 +10377,15 @@ public final class MasterProtos { } if (hasNonceGroup()) { hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonceGroup()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonceGroup()); } if (hasNonce()) { hash = (37 * hash) + NONCE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonce()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonce()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -9959,46 +10413,57 @@ public final class MasterProtos { } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -10006,14 +10471,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.CreateTableRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.CreateTableRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_CreateTableRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_CreateTableRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -10026,23 +10492,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTableSchemaFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (tableSchemaBuilder_ == null) { - tableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + tableSchema_ = null; } else { tableSchemaBuilder_.clear(); } @@ -10056,10 +10519,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_CreateTableRequest_descriptor; @@ -10107,6 +10566,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object 
value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest)other); @@ -10137,17 +10622,16 @@ public final class MasterProtos { if (other.hasNonce()) { setNonce(other.getNonce()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasTableSchema()) { - return false; } if (!getTableSchema().isInitialized()) { - return false; } return true; @@ -10162,7 +10646,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -10172,9 +10656,8 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.TableSchema table_schema = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema tableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema tableSchema_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_; /** * required .hbase.pb.TableSchema table_schema = 1; @@ -10187,7 +10670,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { if (tableSchemaBuilder_ == null) { - return tableSchema_; + return tableSchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : tableSchema_; } else { return tableSchemaBuilder_.getMessage(); } @@ -10228,6 +10711,7 @@ public final class MasterProtos { public Builder mergeTableSchema(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema value) { if (tableSchemaBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + tableSchema_ != null && tableSchema_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) { tableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.newBuilder(tableSchema_).mergeFrom(value).buildPartial(); @@ -10246,7 +10730,7 @@ public final class MasterProtos { */ public Builder clearTableSchema() { if (tableSchemaBuilder_ == null) { - tableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + tableSchema_ = null; onChanged(); } else { tableSchemaBuilder_.clear(); @@ -10269,19 +10753,20 @@ public final class MasterProtos { if (tableSchemaBuilder_ != null) { return tableSchemaBuilder_.getMessageOrBuilder(); } else { - return tableSchema_; + return tableSchema_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : tableSchema_; } } /** * required .hbase.pb.TableSchema table_schema = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> getTableSchemaFieldBuilder() { if (tableSchemaBuilder_ == null) { - tableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>( - tableSchema_, + getTableSchema(), getParentForChildren(), isClean()); tableSchema_ = null; @@ -10289,7 +10774,6 @@ public final class MasterProtos { return tableSchemaBuilder_; } - // repeated bytes split_keys = 2; private java.util.List splitKeys_ = java.util.Collections.emptyList(); private void ensureSplitKeysIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { @@ -10347,7 +10831,8 @@ public final class MasterProtos { public Builder addAllSplitKeys( java.lang.Iterable values) { ensureSplitKeysIsMutable(); - super.addAll(values, splitKeys_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, splitKeys_); onChanged(); return this; } @@ -10361,7 +10846,6 @@ public final class MasterProtos { return this; } - // optional uint64 nonce_group = 3 [default = 0]; private long nonceGroup_ ; /** * optional uint64 nonce_group = 3 [default = 0]; @@ -10394,7 +10878,6 @@ public final class MasterProtos { return this; } - // optional uint64 nonce = 4 [default = 0]; private long nonce_ ; /** * optional 
uint64 nonce = 4 [default = 0]; @@ -10426,22 +10909,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.CreateTableRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.CreateTableRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest DEFAULT_INSTANCE; static { - defaultInstance = new CreateTableRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public CreateTableRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CreateTableRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.CreateTableRequest) } - public interface CreateTableResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface 
CreateTableResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.CreateTableResponse) + com.google.protobuf.MessageOrBuilder { - // optional uint64 proc_id = 1; /** * optional uint64 proc_id = 1; */ @@ -10454,36 +10974,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.CreateTableResponse} */ - public static final class CreateTableResponse extends - com.google.protobuf.GeneratedMessage - implements CreateTableResponseOrBuilder { + public static final class CreateTableResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.CreateTableResponse) + CreateTableResponseOrBuilder { // Use CreateTableResponse.newBuilder() to construct. - private CreateTableResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private CreateTableResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private CreateTableResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final CreateTableResponse defaultInstance; - public static CreateTableResponse getDefaultInstance() { - return defaultInstance; } - - public CreateTableResponse getDefaultInstanceForType() { - return defaultInstance; + private CreateTableResponse() { + procId_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private CreateTableResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ 
-10513,7 +11025,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -10524,30 +11036,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_CreateTableResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_CreateTableResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CreateTableResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CreateTableResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional uint64 proc_id = 1; public static final int PROC_ID_FIELD_NUMBER = 1; private long procId_; /** @@ -10563,13 +11059,11 @@ public final class MasterProtos { return procId_; } - private void initFields() { - procId_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + 
if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -10577,16 +11071,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, procId_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -10594,19 +11086,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(1, procId_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -10622,12 +11108,10 @@ public final class MasterProtos { result = result && (getProcId() == other.getProcId()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -10637,9 +11121,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasProcId()) { hash = (37 * hash) + PROC_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getProcId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getProcId()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + 
unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -10667,46 +11152,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -10714,14 +11210,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.CreateTableResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.CreateTableResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_CreateTableResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_CreateTableResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -10734,18 +11231,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); procId_ = 0L; @@ -10753,10 +11247,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_CreateTableResponse_descriptor; @@ -10787,6 +11277,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + 
com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse)other); @@ -10801,7 +11317,8 @@ public final class MasterProtos { if (other.hasProcId()) { setProcId(other.getProcId()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -10818,7 +11335,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -10828,7 +11345,6 @@ public final class MasterProtos { } private int bitField0_; - // optional uint64 proc_id = 1; private long procId_ ; /** * optional uint64 proc_id = 1; @@ -10860,22 +11376,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.CreateTableResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.CreateTableResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse DEFAULT_INSTANCE; static { - defaultInstance = new CreateTableResponse(true); - 
defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public CreateTableResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CreateTableResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.CreateTableResponse) } - public interface DeleteTableRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface DeleteTableRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.DeleteTableRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.TableName table_name = 1; /** * required .hbase.pb.TableName table_name = 1; */ @@ -10889,7 +11442,6 @@ public final class MasterProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); - // optional uint64 nonce_group = 2 [default = 0]; /** * optional uint64 nonce_group = 2 [default = 0]; */ @@ -10899,7 +11451,6 @@ public final class MasterProtos { */ long getNonceGroup(); - // optional uint64 nonce = 3 [default = 0]; /** * optional uint64 nonce = 3 [default = 0]; */ @@ -10912,36 +11463,29 @@ public final class 
MasterProtos { /** * Protobuf type {@code hbase.pb.DeleteTableRequest} */ - public static final class DeleteTableRequest extends - com.google.protobuf.GeneratedMessage - implements DeleteTableRequestOrBuilder { + public static final class DeleteTableRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.DeleteTableRequest) + DeleteTableRequestOrBuilder { // Use DeleteTableRequest.newBuilder() to construct. - private DeleteTableRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private DeleteTableRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private DeleteTableRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final DeleteTableRequest defaultInstance; - public static DeleteTableRequest getDefaultInstance() { - return defaultInstance; } - - public DeleteTableRequest getDefaultInstanceForType() { - return defaultInstance; + private DeleteTableRequest() { + nonceGroup_ = 0L; + nonce_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private DeleteTableRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -10989,7 +11533,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } 
finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -11000,30 +11544,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteTableRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteTableRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public DeleteTableRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new DeleteTableRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.TableName table_name = 1; public static final int TABLE_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** @@ -11036,16 +11564,15 @@ public final class MasterProtos { * required .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { - return tableName_; + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * required .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } - // optional uint64 nonce_group = 2 [default = 0]; public static final int NONCE_GROUP_FIELD_NUMBER = 2; private long nonceGroup_; /** @@ -11061,7 +11588,6 @@ public final class MasterProtos { return nonceGroup_; } - // optional uint64 nonce = 3 [default = 0]; public static final int NONCE_FIELD_NUMBER = 3; private long nonce_; /** @@ -11077,15 +11603,11 @@ public final class MasterProtos { return nonce_; } - private void initFields() { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - nonceGroup_ = 0L; - nonce_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasTableName()) { memoizedIsInitialized = 0; @@ -11101,9 +11623,8 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, tableName_); + output.writeMessage(1, getTableName()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt64(2, nonceGroup_); @@ -11111,18 +11632,17 @@ public final class MasterProtos { if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeUInt64(3, nonce_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int 
getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, tableName_); + .computeMessageSize(1, getTableName()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream @@ -11132,19 +11652,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(3, nonce_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -11170,12 +11684,10 @@ public final class MasterProtos { result = result && (getNonce() == other.getNonce()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -11189,13 +11701,15 @@ public final class MasterProtos { } if (hasNonceGroup()) { hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonceGroup()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonceGroup()); } if (hasNonce()) { hash = (37 * hash) + NONCE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonce()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonce()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -11223,46 +11737,57 @@ 
public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return 
PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -11270,14 +11795,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.DeleteTableRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.DeleteTableRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteTableRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteTableRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -11290,23 +11816,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTableNameFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; } else { tableNameBuilder_.clear(); } @@ -11318,10 +11841,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteTableRequest_descriptor; @@ -11364,6 +11883,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + 
return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest)other); @@ -11384,17 +11929,16 @@ public final class MasterProtos { if (other.hasNonce()) { setNonce(other.getNonce()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasTableName()) { - return false; } if (!getTableName().isInitialized()) { - return false; } return true; @@ -11409,7 +11953,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -11419,9 +11963,8 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.TableName table_name = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * required .hbase.pb.TableName table_name = 1; @@ -11434,7 +11977,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } @@ -11475,6 +12018,7 @@ public final class MasterProtos { public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); @@ -11493,7 +12037,7 @@ public final class MasterProtos { */ public Builder clearTableName() { if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); @@ -11516,19 +12060,20 @@ public final class MasterProtos { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { - return tableName_; + return tableName_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * required .hbase.pb.TableName table_name = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { - tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( - tableName_, + getTableName(), getParentForChildren(), isClean()); tableName_ = null; @@ -11536,7 +12081,6 @@ public final class MasterProtos { return tableNameBuilder_; } - // optional uint64 nonce_group = 2 [default = 0]; private long nonceGroup_ ; /** * optional uint64 nonce_group = 2 [default = 0]; @@ -11569,7 +12113,6 @@ public final class MasterProtos { return this; } - // optional uint64 nonce = 3 [default = 0]; private long nonce_ ; /** * optional uint64 nonce = 3 [default = 0]; @@ -11601,22 +12144,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.DeleteTableRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.DeleteTableRequest) + private static 
final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest DEFAULT_INSTANCE; static { - defaultInstance = new DeleteTableRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public DeleteTableRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DeleteTableRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.DeleteTableRequest) } - public interface DeleteTableResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface DeleteTableResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.DeleteTableResponse) + com.google.protobuf.MessageOrBuilder { - // optional uint64 proc_id = 1; /** * optional uint64 proc_id = 1; */ @@ -11629,36 +12209,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.DeleteTableResponse} */ - public static final class DeleteTableResponse extends - com.google.protobuf.GeneratedMessage - implements DeleteTableResponseOrBuilder { + public static final class DeleteTableResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // 
@@protoc_insertion_point(message_implements:hbase.pb.DeleteTableResponse) + DeleteTableResponseOrBuilder { // Use DeleteTableResponse.newBuilder() to construct. - private DeleteTableResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private DeleteTableResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private DeleteTableResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final DeleteTableResponse defaultInstance; - public static DeleteTableResponse getDefaultInstance() { - return defaultInstance; } - - public DeleteTableResponse getDefaultInstanceForType() { - return defaultInstance; + private DeleteTableResponse() { + procId_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private DeleteTableResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -11688,7 +12260,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -11699,30 +12271,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteTableResponse_descriptor; } - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteTableResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public DeleteTableResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new DeleteTableResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional uint64 proc_id = 1; public static final int PROC_ID_FIELD_NUMBER = 1; private long procId_; /** @@ -11738,13 +12294,11 @@ public final class MasterProtos { return procId_; } - private void initFields() { - procId_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -11752,16 +12306,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, procId_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = 
memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -11769,19 +12321,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(1, procId_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -11797,12 +12343,10 @@ public final class MasterProtos { result = result && (getProcId() == other.getProcId()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -11812,9 +12356,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasProcId()) { hash = (37 * hash) + PROC_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getProcId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getProcId()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -11842,46 +12387,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom( java.io.InputStream 
input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder 
newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -11889,14 +12445,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.DeleteTableResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.DeleteTableResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteTableResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteTableResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -11909,18 +12466,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private 
void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); procId_ = 0L; @@ -11928,10 +12482,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteTableResponse_descriptor; @@ -11962,6 +12512,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse)other); @@ -11976,7 +12552,8 @@ public final class MasterProtos { if (other.hasProcId()) { setProcId(other.getProcId()); } - 
this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -11993,7 +12570,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -12003,7 +12580,6 @@ public final class MasterProtos { } private int bitField0_; - // optional uint64 proc_id = 1; private long procId_ ; /** * optional uint64 proc_id = 1; @@ -12035,22 +12611,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.DeleteTableResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.DeleteTableResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse DEFAULT_INSTANCE; static { - defaultInstance = new DeleteTableResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public DeleteTableResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DeleteTableResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.DeleteTableResponse) } - public interface TruncateTableRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface TruncateTableRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.TruncateTableRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.TableName tableName = 1; /** * required .hbase.pb.TableName tableName = 1; */ @@ -12064,7 +12677,6 @@ public final class MasterProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); - // optional bool preserveSplits = 2 [default = false]; /** * optional bool preserveSplits = 2 [default = false]; */ @@ -12074,7 +12686,6 @@ public final class MasterProtos { */ boolean getPreserveSplits(); - // optional uint64 nonce_group = 3 [default = 0]; /** * optional uint64 nonce_group = 3 [default = 0]; */ @@ -12084,7 +12695,6 @@ public final class MasterProtos { */ long getNonceGroup(); - // optional uint64 nonce = 4 [default = 0]; /** * optional uint64 nonce = 4 [default = 0]; */ @@ -12097,36 +12707,30 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.TruncateTableRequest} */ - public static final class TruncateTableRequest extends - com.google.protobuf.GeneratedMessage - implements TruncateTableRequestOrBuilder { + public static final class TruncateTableRequest extends + com.google.protobuf.GeneratedMessageV3 
implements + // @@protoc_insertion_point(message_implements:hbase.pb.TruncateTableRequest) + TruncateTableRequestOrBuilder { // Use TruncateTableRequest.newBuilder() to construct. - private TruncateTableRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private TruncateTableRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private TruncateTableRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final TruncateTableRequest defaultInstance; - public static TruncateTableRequest getDefaultInstance() { - return defaultInstance; } - - public TruncateTableRequest getDefaultInstanceForType() { - return defaultInstance; + private TruncateTableRequest() { + preserveSplits_ = false; + nonceGroup_ = 0L; + nonce_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private TruncateTableRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -12179,7 +12783,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -12190,30 +12794,14 @@ public final class MasterProtos { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_TruncateTableRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_TruncateTableRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public TruncateTableRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new TruncateTableRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.TableName tableName = 1; public static final int TABLENAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** @@ -12226,16 +12814,15 @@ public final class MasterProtos { * required .hbase.pb.TableName tableName = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * required .hbase.pb.TableName tableName = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { - return tableName_; + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } - // optional bool preserveSplits = 2 [default = false]; public static final int PRESERVESPLITS_FIELD_NUMBER = 2; private boolean preserveSplits_; /** @@ -12251,7 +12838,6 @@ public final class MasterProtos { return preserveSplits_; } - // optional uint64 nonce_group = 3 [default = 0]; public static final int NONCE_GROUP_FIELD_NUMBER = 3; private long nonceGroup_; /** @@ -12267,7 +12853,6 @@ public final class MasterProtos { return nonceGroup_; } - // optional uint64 nonce = 4 [default = 0]; public static final int NONCE_FIELD_NUMBER = 4; private long nonce_; /** @@ -12283,16 +12868,11 @@ public final class MasterProtos { return nonce_; } - private void initFields() { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - preserveSplits_ = false; - nonceGroup_ = 0L; - nonce_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasTableName()) { memoizedIsInitialized = 0; @@ -12308,9 +12888,8 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, tableName_); + output.writeMessage(1, getTableName()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBool(2, preserveSplits_); @@ -12321,18 +12900,17 @@ public final class MasterProtos { if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeUInt64(4, nonce_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int 
size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, tableName_); + .computeMessageSize(1, getTableName()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream @@ -12346,19 +12924,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(4, nonce_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -12389,12 +12961,10 @@ public final class MasterProtos { result = result && (getNonce() == other.getNonce()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -12408,17 +12978,20 @@ public final class MasterProtos { } if (hasPreserveSplits()) { hash = (37 * hash) + PRESERVESPLITS_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getPreserveSplits()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getPreserveSplits()); } if (hasNonceGroup()) { hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonceGroup()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonceGroup()); } if (hasNonce()) { hash = (37 * hash) + NONCE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonce()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonce()); } - hash = 
(29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -12446,46 +13019,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -12493,14 +13077,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.TruncateTableRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.TruncateTableRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_TruncateTableRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_TruncateTableRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -12513,23 +13098,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTableNameFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; } else { tableNameBuilder_.clear(); } @@ -12543,10 +13125,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_TruncateTableRequest_descriptor; @@ -12593,6 +13171,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + 
return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest)other); @@ -12616,17 +13220,16 @@ public final class MasterProtos { if (other.hasNonce()) { setNonce(other.getNonce()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasTableName()) { - return false; } if (!getTableName().isInitialized()) { - return false; } return true; @@ -12641,7 +13244,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -12651,9 +13254,8 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.TableName tableName = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - private 
com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * required .hbase.pb.TableName tableName = 1; @@ -12666,7 +13268,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } @@ -12707,6 +13309,7 @@ public final class MasterProtos { public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); @@ -12725,7 +13328,7 @@ public final class MasterProtos { */ public Builder clearTableName() { if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); @@ -12748,19 +13351,20 @@ public final class MasterProtos { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { - return tableName_; + return tableName_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * required .hbase.pb.TableName tableName = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { - tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( - tableName_, + getTableName(), getParentForChildren(), isClean()); tableName_ = null; @@ -12768,7 +13372,6 @@ public final class MasterProtos { return tableNameBuilder_; } - // optional bool preserveSplits = 2 [default = false]; private boolean preserveSplits_ ; /** * optional bool preserveSplits = 2 [default = false]; @@ -12801,7 +13404,6 @@ public final class MasterProtos { return this; } - // optional uint64 nonce_group = 3 [default = 0]; private long nonceGroup_ ; /** * optional uint64 nonce_group = 3 [default = 0]; @@ -12834,7 +13436,6 @@ public final class MasterProtos { return this; } - // optional uint64 nonce = 4 [default = 0]; private long nonce_ ; /** * optional uint64 nonce = 4 [default = 0]; @@ -12866,22 +13467,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { 
+ return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.TruncateTableRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.TruncateTableRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest DEFAULT_INSTANCE; static { - defaultInstance = new TruncateTableRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public TruncateTableRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TruncateTableRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.TruncateTableRequest) } - public interface TruncateTableResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface TruncateTableResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.TruncateTableResponse) + com.google.protobuf.MessageOrBuilder { - // optional uint64 proc_id = 1; /** * optional uint64 proc_id = 1; */ @@ -12894,36 +13532,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.TruncateTableResponse} */ - public 
static final class TruncateTableResponse extends - com.google.protobuf.GeneratedMessage - implements TruncateTableResponseOrBuilder { + public static final class TruncateTableResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.TruncateTableResponse) + TruncateTableResponseOrBuilder { // Use TruncateTableResponse.newBuilder() to construct. - private TruncateTableResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private TruncateTableResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private TruncateTableResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final TruncateTableResponse defaultInstance; - public static TruncateTableResponse getDefaultInstance() { - return defaultInstance; } - - public TruncateTableResponse getDefaultInstanceForType() { - return defaultInstance; + private TruncateTableResponse() { + procId_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private TruncateTableResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -12953,7 +13583,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); 
makeExtensionsImmutable(); @@ -12964,30 +13594,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_TruncateTableResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_TruncateTableResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public TruncateTableResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new TruncateTableResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional uint64 proc_id = 1; public static final int PROC_ID_FIELD_NUMBER = 1; private long procId_; /** @@ -13003,13 +13617,11 @@ public final class MasterProtos { return procId_; } - private void initFields() { - procId_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -13017,16 +13629,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if 
(((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, procId_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -13034,19 +13644,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(1, procId_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -13062,12 +13666,10 @@ public final class MasterProtos { result = result && (getProcId() == other.getProcId()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -13077,9 +13679,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasProcId()) { hash = (37 * hash) + PROC_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getProcId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getProcId()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -13107,46 +13710,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return 
PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return 
Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -13154,14 +13768,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.TruncateTableResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.TruncateTableResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_TruncateTableResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_TruncateTableResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -13174,18 +13789,15 @@ public final class MasterProtos { } private 
Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); procId_ = 0L; @@ -13193,10 +13805,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_TruncateTableResponse_descriptor; @@ -13227,6 +13835,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse) { return 
mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse)other); @@ -13241,7 +13875,8 @@ public final class MasterProtos { if (other.hasProcId()) { setProcId(other.getProcId()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -13258,7 +13893,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -13268,7 +13903,6 @@ public final class MasterProtos { } private int bitField0_; - // optional uint64 proc_id = 1; private long procId_ ; /** * optional uint64 proc_id = 1; @@ -13300,22 +13934,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.TruncateTableResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.TruncateTableResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse DEFAULT_INSTANCE; static { - defaultInstance = new TruncateTableResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + 
} + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public TruncateTableResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TruncateTableResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.TruncateTableResponse) } - public interface EnableTableRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface EnableTableRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.EnableTableRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.TableName table_name = 1; /** * required .hbase.pb.TableName table_name = 1; */ @@ -13329,7 +14000,6 @@ public final class MasterProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); - // optional uint64 nonce_group = 2 [default = 0]; /** * optional uint64 nonce_group = 2 [default = 0]; */ @@ -13339,7 +14009,6 @@ public final class MasterProtos { */ long getNonceGroup(); - // optional uint64 nonce = 3 [default = 0]; /** * optional uint64 nonce = 3 [default = 0]; */ @@ -13352,36 +14021,29 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.EnableTableRequest} */ - public static final class EnableTableRequest extends - com.google.protobuf.GeneratedMessage - implements EnableTableRequestOrBuilder { + public static final class EnableTableRequest extends + 
com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.EnableTableRequest) + EnableTableRequestOrBuilder { // Use EnableTableRequest.newBuilder() to construct. - private EnableTableRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private EnableTableRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private EnableTableRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final EnableTableRequest defaultInstance; - public static EnableTableRequest getDefaultInstance() { - return defaultInstance; } - - public EnableTableRequest getDefaultInstanceForType() { - return defaultInstance; + private EnableTableRequest() { + nonceGroup_ = 0L; + nonce_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private EnableTableRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -13429,7 +14091,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -13440,30 +14102,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_EnableTableRequest_descriptor; } - 
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_EnableTableRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public EnableTableRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new EnableTableRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.TableName table_name = 1; public static final int TABLE_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** @@ -13476,16 +14122,15 @@ public final class MasterProtos { * required .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * required .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { - return tableName_; + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } - // optional uint64 nonce_group = 2 [default = 0]; public static final int NONCE_GROUP_FIELD_NUMBER = 2; private long nonceGroup_; /** @@ -13501,7 +14146,6 @@ public final class MasterProtos { return nonceGroup_; } - // optional uint64 nonce = 3 [default = 0]; public static final int NONCE_FIELD_NUMBER = 3; private long nonce_; /** @@ -13517,15 +14161,11 @@ public final class MasterProtos { return nonce_; } - private void initFields() { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - nonceGroup_ = 0L; - nonce_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasTableName()) { memoizedIsInitialized = 0; @@ -13541,9 +14181,8 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, tableName_); + output.writeMessage(1, getTableName()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt64(2, nonceGroup_); @@ -13551,18 +14190,17 @@ public final class MasterProtos { if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeUInt64(3, nonce_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, tableName_); + .computeMessageSize(1, getTableName()); } if (((bitField0_ & 0x00000002) 
== 0x00000002)) { size += com.google.protobuf.CodedOutputStream @@ -13572,19 +14210,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(3, nonce_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -13610,12 +14242,10 @@ public final class MasterProtos { result = result && (getNonce() == other.getNonce()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -13629,13 +14259,15 @@ public final class MasterProtos { } if (hasNonceGroup()) { hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonceGroup()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonceGroup()); } if (hasNonce()) { hash = (37 * hash) + NONCE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonce()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonce()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -13663,46 +14295,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public 
static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return 
DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -13710,14 +14353,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.EnableTableRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.EnableTableRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_EnableTableRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_EnableTableRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -13730,23 +14374,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { 
super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTableNameFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; } else { tableNameBuilder_.clear(); } @@ -13758,10 +14399,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_EnableTableRequest_descriptor; @@ -13804,6 +14441,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest)other); @@ -13824,17 +14487,16 @@ public final class MasterProtos { if (other.hasNonce()) { setNonce(other.getNonce()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasTableName()) { - return false; } if (!getTableName().isInitialized()) { - return false; } return true; @@ -13849,7 +14511,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -13859,9 +14521,8 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.TableName table_name = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * required .hbase.pb.TableName table_name = 1; @@ -13874,7 +14535,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() 
{ if (tableNameBuilder_ == null) { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } @@ -13915,6 +14576,7 @@ public final class MasterProtos { public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); @@ -13933,7 +14595,7 @@ public final class MasterProtos { */ public Builder clearTableName() { if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); @@ -13956,19 +14618,20 @@ public final class MasterProtos { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { - return tableName_; + return tableName_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * required .hbase.pb.TableName table_name = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { - tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( - tableName_, + getTableName(), getParentForChildren(), isClean()); tableName_ = null; @@ -13976,7 +14639,6 @@ public final class MasterProtos { return tableNameBuilder_; } - // optional uint64 nonce_group = 2 [default = 0]; private long nonceGroup_ ; /** * optional uint64 nonce_group = 2 [default = 0]; @@ -14009,7 +14671,6 @@ public final class MasterProtos { return this; } - // optional uint64 nonce = 3 [default = 0]; private long nonce_ ; /** * optional uint64 nonce = 3 [default = 0]; @@ -14041,22 +14702,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.EnableTableRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.EnableTableRequest) + private static 
final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest DEFAULT_INSTANCE; static { - defaultInstance = new EnableTableRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public EnableTableRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new EnableTableRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.EnableTableRequest) } - public interface EnableTableResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface EnableTableResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.EnableTableResponse) + com.google.protobuf.MessageOrBuilder { - // optional uint64 proc_id = 1; /** * optional uint64 proc_id = 1; */ @@ -14069,36 +14767,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.EnableTableResponse} */ - public static final class EnableTableResponse extends - com.google.protobuf.GeneratedMessage - implements EnableTableResponseOrBuilder { + public static final class EnableTableResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // 
@@protoc_insertion_point(message_implements:hbase.pb.EnableTableResponse) + EnableTableResponseOrBuilder { // Use EnableTableResponse.newBuilder() to construct. - private EnableTableResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private EnableTableResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private EnableTableResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final EnableTableResponse defaultInstance; - public static EnableTableResponse getDefaultInstance() { - return defaultInstance; } - - public EnableTableResponse getDefaultInstanceForType() { - return defaultInstance; + private EnableTableResponse() { + procId_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private EnableTableResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -14128,7 +14818,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -14139,30 +14829,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_EnableTableResponse_descriptor; } - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_EnableTableResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public EnableTableResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new EnableTableResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional uint64 proc_id = 1; public static final int PROC_ID_FIELD_NUMBER = 1; private long procId_; /** @@ -14178,13 +14852,11 @@ public final class MasterProtos { return procId_; } - private void initFields() { - procId_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -14192,16 +14864,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, procId_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = 
memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -14209,19 +14879,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(1, procId_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -14237,12 +14901,10 @@ public final class MasterProtos { result = result && (getProcId() == other.getProcId()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -14252,9 +14914,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasProcId()) { hash = (37 * hash) + PROC_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getProcId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getProcId()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -14282,46 +14945,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse parseFrom( java.io.InputStream 
input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder 
newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -14329,14 +15003,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.EnableTableResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.EnableTableResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_EnableTableResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_EnableTableResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -14349,18 +15024,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private 
void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); procId_ = 0L; @@ -14368,10 +15040,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_EnableTableResponse_descriptor; @@ -14402,6 +15070,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse)other); @@ -14416,7 +15110,8 @@ public final class MasterProtos { if (other.hasProcId()) { setProcId(other.getProcId()); } - 
this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -14433,7 +15128,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -14443,7 +15138,6 @@ public final class MasterProtos { } private int bitField0_; - // optional uint64 proc_id = 1; private long procId_ ; /** * optional uint64 proc_id = 1; @@ -14475,22 +15169,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.EnableTableResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.EnableTableResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse DEFAULT_INSTANCE; static { - defaultInstance = new EnableTableResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public EnableTableResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new EnableTableResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.EnableTableResponse) } - public interface DisableTableRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface DisableTableRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.DisableTableRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.TableName table_name = 1; /** * required .hbase.pb.TableName table_name = 1; */ @@ -14504,7 +15235,6 @@ public final class MasterProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); - // optional uint64 nonce_group = 2 [default = 0]; /** * optional uint64 nonce_group = 2 [default = 0]; */ @@ -14514,7 +15244,6 @@ public final class MasterProtos { */ long getNonceGroup(); - // optional uint64 nonce = 3 [default = 0]; /** * optional uint64 nonce = 3 [default = 0]; */ @@ -14527,36 +15256,29 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.DisableTableRequest} */ - public static final class DisableTableRequest extends - com.google.protobuf.GeneratedMessage - implements DisableTableRequestOrBuilder { + public static final class DisableTableRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.DisableTableRequest) + DisableTableRequestOrBuilder { // Use DisableTableRequest.newBuilder() to construct. 
- private DisableTableRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private DisableTableRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private DisableTableRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final DisableTableRequest defaultInstance; - public static DisableTableRequest getDefaultInstance() { - return defaultInstance; } - - public DisableTableRequest getDefaultInstanceForType() { - return defaultInstance; + private DisableTableRequest() { + nonceGroup_ = 0L; + nonce_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private DisableTableRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -14604,7 +15326,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -14615,30 +15337,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DisableTableRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DisableTableRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public DisableTableRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new DisableTableRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.TableName table_name = 1; public static final int TABLE_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** @@ -14651,16 +15357,15 @@ public final class MasterProtos { * required .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * required .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { - return tableName_; + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } - // optional uint64 nonce_group = 2 [default = 0]; public static final int NONCE_GROUP_FIELD_NUMBER = 2; private long nonceGroup_; /** @@ -14676,7 +15381,6 @@ public final class MasterProtos { return nonceGroup_; } - // optional uint64 nonce = 3 [default = 0]; public static final int NONCE_FIELD_NUMBER = 3; private long nonce_; /** @@ -14692,15 +15396,11 @@ public final class MasterProtos { return nonce_; } - private void initFields() { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - nonceGroup_ = 0L; - nonce_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasTableName()) { memoizedIsInitialized = 0; @@ -14716,9 +15416,8 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, tableName_); + output.writeMessage(1, getTableName()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt64(2, nonceGroup_); @@ -14726,18 +15425,17 @@ public final class MasterProtos { if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeUInt64(3, nonce_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, tableName_); + .computeMessageSize(1, getTableName()); } if (((bitField0_ & 0x00000002) 
== 0x00000002)) { size += com.google.protobuf.CodedOutputStream @@ -14747,19 +15445,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(3, nonce_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -14785,12 +15477,10 @@ public final class MasterProtos { result = result && (getNonce() == other.getNonce()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -14804,13 +15494,15 @@ public final class MasterProtos { } if (hasNonceGroup()) { hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonceGroup()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonceGroup()); } if (hasNonce()) { hash = (37 * hash) + NONCE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonce()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonce()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -14838,46 +15530,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public 
static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return 
DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -14885,14 +15588,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.DisableTableRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.DisableTableRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DisableTableRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DisableTableRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -14905,23 +15609,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { 
super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTableNameFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; } else { tableNameBuilder_.clear(); } @@ -14933,10 +15634,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DisableTableRequest_descriptor; @@ -14979,6 +15676,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest)other); @@ -14999,17 +15722,16 @@ public final class MasterProtos { if (other.hasNonce()) { setNonce(other.getNonce()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasTableName()) { - return false; } if (!getTableName().isInitialized()) { - return false; } return true; @@ -15024,7 +15746,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -15034,9 +15756,8 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.TableName table_name = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * required .hbase.pb.TableName table_name = 1; @@ -15049,7 +15770,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName 
getTableName() { if (tableNameBuilder_ == null) { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } @@ -15090,6 +15811,7 @@ public final class MasterProtos { public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); @@ -15108,7 +15830,7 @@ public final class MasterProtos { */ public Builder clearTableName() { if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); @@ -15131,19 +15853,20 @@ public final class MasterProtos { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { - return tableName_; + return tableName_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * required .hbase.pb.TableName table_name = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { - tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( - tableName_, + getTableName(), getParentForChildren(), isClean()); tableName_ = null; @@ -15151,7 +15874,6 @@ public final class MasterProtos { return tableNameBuilder_; } - // optional uint64 nonce_group = 2 [default = 0]; private long nonceGroup_ ; /** * optional uint64 nonce_group = 2 [default = 0]; @@ -15184,7 +15906,6 @@ public final class MasterProtos { return this; } - // optional uint64 nonce = 3 [default = 0]; private long nonce_ ; /** * optional uint64 nonce = 3 [default = 0]; @@ -15216,22 +15937,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.DisableTableRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.DisableTableRequest) + private static 
final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest DEFAULT_INSTANCE; static { - defaultInstance = new DisableTableRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public DisableTableRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DisableTableRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.DisableTableRequest) } - public interface DisableTableResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface DisableTableResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.DisableTableResponse) + com.google.protobuf.MessageOrBuilder { - // optional uint64 proc_id = 1; /** * optional uint64 proc_id = 1; */ @@ -15244,36 +16002,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.DisableTableResponse} */ - public static final class DisableTableResponse extends - com.google.protobuf.GeneratedMessage - implements DisableTableResponseOrBuilder { + public static final class DisableTableResponse extends + com.google.protobuf.GeneratedMessageV3 
implements + // @@protoc_insertion_point(message_implements:hbase.pb.DisableTableResponse) + DisableTableResponseOrBuilder { // Use DisableTableResponse.newBuilder() to construct. - private DisableTableResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private DisableTableResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private DisableTableResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final DisableTableResponse defaultInstance; - public static DisableTableResponse getDefaultInstance() { - return defaultInstance; } - - public DisableTableResponse getDefaultInstanceForType() { - return defaultInstance; + private DisableTableResponse() { + procId_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private DisableTableResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -15303,7 +16053,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -15314,30 +16064,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DisableTableResponse_descriptor; } - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DisableTableResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public DisableTableResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new DisableTableResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional uint64 proc_id = 1; public static final int PROC_ID_FIELD_NUMBER = 1; private long procId_; /** @@ -15353,13 +16087,11 @@ public final class MasterProtos { return procId_; } - private void initFields() { - procId_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -15367,16 +16099,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, procId_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = 
memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -15384,19 +16114,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(1, procId_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -15412,12 +16136,10 @@ public final class MasterProtos { result = result && (getProcId() == other.getProcId()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -15427,9 +16149,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasProcId()) { hash = (37 * hash) + PROC_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getProcId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getProcId()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -15457,46 +16180,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse parseFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder 
newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -15504,14 +16238,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.DisableTableResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.DisableTableResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DisableTableResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DisableTableResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -15524,18 +16259,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } 
private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); procId_ = 0L; @@ -15543,10 +16275,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DisableTableResponse_descriptor; @@ -15577,6 +16305,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse)other); @@ -15591,7 +16345,8 @@ public final class MasterProtos { if (other.hasProcId()) { setProcId(other.getProcId()); } - 
this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -15608,7 +16363,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -15618,7 +16373,6 @@ public final class MasterProtos { } private int bitField0_; - // optional uint64 proc_id = 1; private long procId_ ; /** * optional uint64 proc_id = 1; @@ -15650,22 +16404,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.DisableTableResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.DisableTableResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse DEFAULT_INSTANCE; static { - defaultInstance = new DisableTableResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public DisableTableResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DisableTableResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.DisableTableResponse) } - public interface ModifyTableRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ModifyTableRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ModifyTableRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.TableName table_name = 1; /** * required .hbase.pb.TableName table_name = 1; */ @@ -15679,7 +16470,6 @@ public final class MasterProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); - // required .hbase.pb.TableSchema table_schema = 2; /** * required .hbase.pb.TableSchema table_schema = 2; */ @@ -15693,7 +16483,6 @@ public final class MasterProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder(); - // optional uint64 nonce_group = 3 [default = 0]; /** * optional uint64 nonce_group = 3 [default = 0]; */ @@ -15703,7 +16492,6 @@ public final class MasterProtos { */ long getNonceGroup(); - // optional uint64 nonce = 4 [default = 0]; /** * optional uint64 nonce = 4 [default = 0]; */ @@ -15716,36 +16504,29 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.ModifyTableRequest} */ - public static final class ModifyTableRequest extends - com.google.protobuf.GeneratedMessage - implements ModifyTableRequestOrBuilder { + public static final class 
ModifyTableRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ModifyTableRequest) + ModifyTableRequestOrBuilder { // Use ModifyTableRequest.newBuilder() to construct. - private ModifyTableRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private ModifyTableRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ModifyTableRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ModifyTableRequest defaultInstance; - public static ModifyTableRequest getDefaultInstance() { - return defaultInstance; } - - public ModifyTableRequest getDefaultInstanceForType() { - return defaultInstance; + private ModifyTableRequest() { + nonceGroup_ = 0L; + nonce_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ModifyTableRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -15806,7 +16587,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -15817,30 +16598,14 @@ public final class MasterProtos { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyTableRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyTableRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ModifyTableRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ModifyTableRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.TableName table_name = 1; public static final int TABLE_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** @@ -15853,16 +16618,15 @@ public final class MasterProtos { * required .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * required .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { - return tableName_; + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } - // required .hbase.pb.TableSchema table_schema = 2; public static final int TABLE_SCHEMA_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema tableSchema_; /** @@ -15875,16 +16639,15 @@ public final class MasterProtos { * required .hbase.pb.TableSchema table_schema = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { - return tableSchema_; + return tableSchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : tableSchema_; } /** * required .hbase.pb.TableSchema table_schema = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() { - return tableSchema_; + return tableSchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : tableSchema_; } - // optional uint64 nonce_group = 3 [default = 0]; public static final int NONCE_GROUP_FIELD_NUMBER = 3; private long nonceGroup_; /** @@ -15900,7 +16663,6 @@ public final class MasterProtos { return nonceGroup_; } - // optional uint64 nonce = 4 [default = 0]; public static final int NONCE_FIELD_NUMBER = 4; private long nonce_; /** @@ -15916,16 +16678,11 @@ public final class MasterProtos { return nonce_; } - private void initFields() { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - tableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - nonceGroup_ = 0L; - nonce_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return 
false; if (!hasTableName()) { memoizedIsInitialized = 0; @@ -15949,12 +16706,11 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, tableName_); + output.writeMessage(1, getTableName()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, tableSchema_); + output.writeMessage(2, getTableSchema()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeUInt64(3, nonceGroup_); @@ -15962,22 +16718,21 @@ public final class MasterProtos { if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeUInt64(4, nonce_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, tableName_); + .computeMessageSize(1, getTableName()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, tableSchema_); + .computeMessageSize(2, getTableSchema()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream @@ -15987,19 +16742,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(4, nonce_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object 
obj) { if (obj == this) { return true; @@ -16030,12 +16779,10 @@ public final class MasterProtos { result = result && (getNonce() == other.getNonce()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -16053,13 +16800,15 @@ public final class MasterProtos { } if (hasNonceGroup()) { hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonceGroup()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonceGroup()); } if (hasNonce()) { hash = (37 * hash) + NONCE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonce()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonce()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -16087,46 +16836,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return 
com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -16134,14 +16894,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.ModifyTableRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ModifyTableRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyTableRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyTableRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -16154,30 +16915,27 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTableNameFieldBuilder(); getTableSchemaFieldBuilder(); } } - private static Builder create() { - return 
new Builder(); - } - public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (tableSchemaBuilder_ == null) { - tableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + tableSchema_ = null; } else { tableSchemaBuilder_.clear(); } @@ -16189,10 +16947,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyTableRequest_descriptor; @@ -16243,6 +16997,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequest) { return 
mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequest)other); @@ -16266,25 +17046,22 @@ public final class MasterProtos { if (other.hasNonce()) { setNonce(other.getNonce()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasTableName()) { - return false; } if (!hasTableSchema()) { - return false; } if (!getTableName().isInitialized()) { - return false; } if (!getTableSchema().isInitialized()) { - return false; } return true; @@ -16299,7 +17076,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -16309,9 +17086,8 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.TableName table_name = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * required .hbase.pb.TableName table_name = 1; @@ -16324,7 +17100,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName 
getTableName() { if (tableNameBuilder_ == null) { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } @@ -16365,6 +17141,7 @@ public final class MasterProtos { public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); @@ -16383,7 +17160,7 @@ public final class MasterProtos { */ public Builder clearTableName() { if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); @@ -16406,19 +17183,20 @@ public final class MasterProtos { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { - return tableName_; + return tableName_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * required .hbase.pb.TableName table_name = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { - tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( - tableName_, + getTableName(), getParentForChildren(), isClean()); tableName_ = null; @@ -16426,9 +17204,8 @@ public final class MasterProtos { return tableNameBuilder_; } - // required .hbase.pb.TableSchema table_schema = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema tableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema tableSchema_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_; /** * required .hbase.pb.TableSchema table_schema = 2; @@ -16441,7 +17218,7 @@ public final class MasterProtos { */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { if (tableSchemaBuilder_ == null) { - return tableSchema_; + return tableSchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : tableSchema_; } else { return tableSchemaBuilder_.getMessage(); } @@ -16482,6 +17259,7 @@ public final class MasterProtos { public Builder mergeTableSchema(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema value) { if (tableSchemaBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + tableSchema_ != null && tableSchema_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) { tableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.newBuilder(tableSchema_).mergeFrom(value).buildPartial(); @@ -16500,7 +17278,7 @@ public final class MasterProtos { */ public Builder clearTableSchema() { if (tableSchemaBuilder_ == null) { - tableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + tableSchema_ = null; onChanged(); } else { tableSchemaBuilder_.clear(); @@ -16523,19 +17301,20 @@ public final class MasterProtos { if (tableSchemaBuilder_ != null) { return tableSchemaBuilder_.getMessageOrBuilder(); } else { - return tableSchema_; + return tableSchema_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : tableSchema_; } } /** * required .hbase.pb.TableSchema table_schema = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> getTableSchemaFieldBuilder() { if (tableSchemaBuilder_ == null) { - tableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>( - tableSchema_, + getTableSchema(), getParentForChildren(), isClean()); tableSchema_ = null; @@ -16543,7 +17322,6 @@ public final class MasterProtos { return tableSchemaBuilder_; } - // optional uint64 nonce_group = 3 [default = 0]; private long nonceGroup_ ; /** * optional uint64 nonce_group = 3 [default = 0]; @@ -16576,7 +17354,6 @@ public final class MasterProtos { return this; } - // optional uint64 nonce = 4 [default = 0]; private long nonce_ ; /** * optional uint64 nonce = 4 [default = 0]; @@ -16608,22 +17385,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ModifyTableRequest) } + // 
@@protoc_insertion_point(class_scope:hbase.pb.ModifyTableRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequest DEFAULT_INSTANCE; static { - defaultInstance = new ModifyTableRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ModifyTableRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ModifyTableRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ModifyTableRequest) } - public interface ModifyTableResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ModifyTableResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ModifyTableResponse) + com.google.protobuf.MessageOrBuilder { - // optional uint64 proc_id = 1; /** * optional uint64 proc_id = 1; */ @@ -16636,36 +17450,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.ModifyTableResponse} */ - public static final class ModifyTableResponse extends - com.google.protobuf.GeneratedMessage - implements ModifyTableResponseOrBuilder { + public static final class 
ModifyTableResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ModifyTableResponse) + ModifyTableResponseOrBuilder { // Use ModifyTableResponse.newBuilder() to construct. - private ModifyTableResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private ModifyTableResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private ModifyTableResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ModifyTableResponse defaultInstance; - public static ModifyTableResponse getDefaultInstance() { - return defaultInstance; - } - - public ModifyTableResponse getDefaultInstanceForType() { - return defaultInstance; + private ModifyTableResponse() { + procId_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ModifyTableResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -16695,7 +17501,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -16706,30 +17512,14 @@ public final class MasterProtos { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyTableResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyTableResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ModifyTableResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ModifyTableResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional uint64 proc_id = 1; public static final int PROC_ID_FIELD_NUMBER = 1; private long procId_; /** @@ -16745,13 +17535,11 @@ public final class MasterProtos { return procId_; } - private void initFields() { - procId_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -16759,16 +17547,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, procId_); } - getUnknownFields().writeTo(output); 
+ unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -16776,19 +17562,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(1, procId_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -16804,12 +17584,10 @@ public final class MasterProtos { result = result && (getProcId() == other.getProcId()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -16819,9 +17597,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasProcId()) { hash = (37 * hash) + PROC_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getProcId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getProcId()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -16849,46 +17628,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } 
public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return 
DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -16896,14 +17686,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.ModifyTableResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ModifyTableResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyTableResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyTableResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -16916,18 +17707,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { 
super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); procId_ = 0L; @@ -16935,10 +17723,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyTableResponse_descriptor; @@ -16969,6 +17753,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse)other); @@ -16983,7 +17793,8 @@ public final class MasterProtos { if 
(other.hasProcId()) { setProcId(other.getProcId()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -17000,7 +17811,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -17010,7 +17821,6 @@ public final class MasterProtos { } private int bitField0_; - // optional uint64 proc_id = 1; private long procId_ ; /** * optional uint64 proc_id = 1; @@ -17042,22 +17852,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ModifyTableResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.ModifyTableResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse DEFAULT_INSTANCE; static { - defaultInstance = new ModifyTableResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ModifyTableResponse parsePartialFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ModifyTableResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ModifyTableResponse) } - public interface CreateNamespaceRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface CreateNamespaceRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.CreateNamespaceRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.NamespaceDescriptor namespaceDescriptor = 1; /** * required .hbase.pb.NamespaceDescriptor namespaceDescriptor = 1; */ @@ -17071,7 +17918,6 @@ public final class MasterProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder(); - // optional uint64 nonce_group = 2 [default = 0]; /** * optional uint64 nonce_group = 2 [default = 0]; */ @@ -17081,7 +17927,6 @@ public final class MasterProtos { */ long getNonceGroup(); - // optional uint64 nonce = 3 [default = 0]; /** * optional uint64 nonce = 3 [default = 0]; */ @@ -17094,36 +17939,29 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.CreateNamespaceRequest} */ - public static final class CreateNamespaceRequest extends - com.google.protobuf.GeneratedMessage - implements CreateNamespaceRequestOrBuilder { + public static final class CreateNamespaceRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // 
@@protoc_insertion_point(message_implements:hbase.pb.CreateNamespaceRequest) + CreateNamespaceRequestOrBuilder { // Use CreateNamespaceRequest.newBuilder() to construct. - private CreateNamespaceRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private CreateNamespaceRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private CreateNamespaceRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final CreateNamespaceRequest defaultInstance; - public static CreateNamespaceRequest getDefaultInstance() { - return defaultInstance; } - - public CreateNamespaceRequest getDefaultInstanceForType() { - return defaultInstance; + private CreateNamespaceRequest() { + nonceGroup_ = 0L; + nonce_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private CreateNamespaceRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -17171,7 +18009,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -17182,30 +18020,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_CreateNamespaceRequest_descriptor; } - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_CreateNamespaceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CreateNamespaceRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CreateNamespaceRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.NamespaceDescriptor namespaceDescriptor = 1; public static final int NAMESPACEDESCRIPTOR_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor namespaceDescriptor_; /** @@ -17218,16 +18040,15 @@ public final class MasterProtos { * required .hbase.pb.NamespaceDescriptor namespaceDescriptor = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor() { - return namespaceDescriptor_; + return namespaceDescriptor_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance() : namespaceDescriptor_; } /** * required .hbase.pb.NamespaceDescriptor namespaceDescriptor = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder() { - return namespaceDescriptor_; + return namespaceDescriptor_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance() : namespaceDescriptor_; } - // optional uint64 nonce_group = 2 [default = 0]; public static final int NONCE_GROUP_FIELD_NUMBER = 2; private long nonceGroup_; /** @@ -17243,7 +18064,6 @@ public final class MasterProtos { return nonceGroup_; } - // optional uint64 nonce = 3 [default = 0]; public static final int NONCE_FIELD_NUMBER = 3; private long nonce_; /** @@ -17259,15 +18079,11 @@ public final class MasterProtos { return nonce_; } - private void initFields() { - namespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); - nonceGroup_ = 0L; - nonce_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasNamespaceDescriptor()) { memoizedIsInitialized = 0; @@ -17283,9 +18099,8 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, namespaceDescriptor_); + output.writeMessage(1, getNamespaceDescriptor()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt64(2, nonceGroup_); @@ -17293,18 +18108,17 @@ public final class MasterProtos { if (((bitField0_ & 0x00000004) == 0x00000004)) { 
output.writeUInt64(3, nonce_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, namespaceDescriptor_); + .computeMessageSize(1, getNamespaceDescriptor()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream @@ -17314,19 +18128,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(3, nonce_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -17352,12 +18160,10 @@ public final class MasterProtos { result = result && (getNonce() == other.getNonce()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -17371,13 +18177,15 @@ public final class MasterProtos { } if (hasNonceGroup()) { hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonceGroup()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonceGroup()); } if (hasNonce()) { hash = (37 * hash) + NONCE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonce()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + 
getNonce()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -17405,46 +18213,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -17452,14 +18271,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.CreateNamespaceRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.CreateNamespaceRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_CreateNamespaceRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_CreateNamespaceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -17472,23 +18292,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getNamespaceDescriptorFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (namespaceDescriptorBuilder_ == null) { - namespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); + namespaceDescriptor_ = null; } else { namespaceDescriptorBuilder_.clear(); } @@ -17500,10 +18317,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_CreateNamespaceRequest_descriptor; @@ -17546,6 +18359,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( 
+ com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest)other); @@ -17566,17 +18405,16 @@ public final class MasterProtos { if (other.hasNonce()) { setNonce(other.getNonce()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasNamespaceDescriptor()) { - return false; } if (!getNamespaceDescriptor().isInitialized()) { - return false; } return true; @@ -17591,7 +18429,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -17601,9 +18439,8 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.NamespaceDescriptor namespaceDescriptor = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor namespaceDescriptor_ = 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor namespaceDescriptor_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> namespaceDescriptorBuilder_; /** * required .hbase.pb.NamespaceDescriptor namespaceDescriptor = 1; @@ -17616,7 +18453,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor() { if (namespaceDescriptorBuilder_ == null) { - return namespaceDescriptor_; + return namespaceDescriptor_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance() : namespaceDescriptor_; } else { return namespaceDescriptorBuilder_.getMessage(); } @@ -17657,6 +18494,7 @@ public final class MasterProtos { public Builder mergeNamespaceDescriptor(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor value) { if (namespaceDescriptorBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + namespaceDescriptor_ != null && namespaceDescriptor_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance()) { namespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.newBuilder(namespaceDescriptor_).mergeFrom(value).buildPartial(); @@ -17675,7 +18513,7 @@ public final class MasterProtos { */ public Builder clearNamespaceDescriptor() { if (namespaceDescriptorBuilder_ == null) { - namespaceDescriptor_ = 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); + namespaceDescriptor_ = null; onChanged(); } else { namespaceDescriptorBuilder_.clear(); @@ -17698,19 +18536,20 @@ public final class MasterProtos { if (namespaceDescriptorBuilder_ != null) { return namespaceDescriptorBuilder_.getMessageOrBuilder(); } else { - return namespaceDescriptor_; + return namespaceDescriptor_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance() : namespaceDescriptor_; } } /** * required .hbase.pb.NamespaceDescriptor namespaceDescriptor = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> getNamespaceDescriptorFieldBuilder() { if (namespaceDescriptorBuilder_ == null) { - namespaceDescriptorBuilder_ = new com.google.protobuf.SingleFieldBuilder< + namespaceDescriptorBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder>( - namespaceDescriptor_, + getNamespaceDescriptor(), getParentForChildren(), isClean()); namespaceDescriptor_ = null; @@ -17718,7 +18557,6 @@ public final class MasterProtos { return namespaceDescriptorBuilder_; } - // optional uint64 nonce_group = 2 [default = 0]; private long nonceGroup_ ; /** * optional uint64 nonce_group = 2 [default = 0]; @@ -17751,7 +18589,6 @@ public final class MasterProtos { return this; } - // optional uint64 nonce = 3 [default = 0]; private long nonce_ ; 
/** * optional uint64 nonce = 3 [default = 0]; @@ -17783,22 +18620,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.CreateNamespaceRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.CreateNamespaceRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest DEFAULT_INSTANCE; static { - defaultInstance = new CreateNamespaceRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public CreateNamespaceRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CreateNamespaceRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.CreateNamespaceRequest) } - public interface CreateNamespaceResponseOrBuilder - extends 
com.google.protobuf.MessageOrBuilder { + public interface CreateNamespaceResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.CreateNamespaceResponse) + com.google.protobuf.MessageOrBuilder { - // optional uint64 proc_id = 1; /** * optional uint64 proc_id = 1; */ @@ -17811,36 +18685,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.CreateNamespaceResponse} */ - public static final class CreateNamespaceResponse extends - com.google.protobuf.GeneratedMessage - implements CreateNamespaceResponseOrBuilder { + public static final class CreateNamespaceResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.CreateNamespaceResponse) + CreateNamespaceResponseOrBuilder { // Use CreateNamespaceResponse.newBuilder() to construct. - private CreateNamespaceResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private CreateNamespaceResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private CreateNamespaceResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final CreateNamespaceResponse defaultInstance; - public static CreateNamespaceResponse getDefaultInstance() { - return defaultInstance; - } - - public CreateNamespaceResponse getDefaultInstanceForType() { - return defaultInstance; + private CreateNamespaceResponse() { + procId_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private CreateNamespaceResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int 
mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -17870,7 +18736,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -17881,30 +18747,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_CreateNamespaceResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_CreateNamespaceResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CreateNamespaceResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CreateNamespaceResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional uint64 proc_id = 1; public static final int PROC_ID_FIELD_NUMBER = 1; private long procId_; /** @@ -17920,13 +18770,11 @@ public final class MasterProtos { return procId_; } - private void initFields() { - procId_ = 0L; - } private byte 
memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -17934,16 +18782,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, procId_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -17951,19 +18797,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(1, procId_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -17979,12 +18819,10 @@ public final class MasterProtos { result = result && (getProcId() == other.getProcId()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -17994,9 +18832,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasProcId()) { hash = (37 * hash) + PROC_ID_FIELD_NUMBER; - hash = (53 * hash) + 
hashLong(getProcId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getProcId()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -18024,46 +18863,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -18071,14 +18921,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.CreateNamespaceResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.CreateNamespaceResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_CreateNamespaceResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_CreateNamespaceResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -18091,18 +18942,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); procId_ = 0L; @@ -18110,10 +18958,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_CreateNamespaceResponse_descriptor; @@ -18144,6 +18988,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder 
setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse)other); @@ -18158,7 +19028,8 @@ public final class MasterProtos { if (other.hasProcId()) { setProcId(other.getProcId()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -18175,7 +19046,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -18185,7 +19056,6 @@ public final class MasterProtos { } private int bitField0_; - // optional uint64 proc_id = 1; private long procId_ ; /** * optional uint64 proc_id = 1; @@ -18217,22 +19087,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.CreateNamespaceResponse) } + // 
@@protoc_insertion_point(class_scope:hbase.pb.CreateNamespaceResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse DEFAULT_INSTANCE; static { - defaultInstance = new CreateNamespaceResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public CreateNamespaceResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CreateNamespaceResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.CreateNamespaceResponse) } - public interface DeleteNamespaceRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface DeleteNamespaceRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.DeleteNamespaceRequest) + com.google.protobuf.MessageOrBuilder { - // required string namespaceName = 1; /** * required string namespaceName = 1; */ @@ -18247,7 +19154,6 @@ public final class MasterProtos { com.google.protobuf.ByteString getNamespaceNameBytes(); - // optional uint64 nonce_group = 2 [default = 0]; /** * optional uint64 nonce_group = 2 [default = 0]; */ @@ 
-18257,7 +19163,6 @@ public final class MasterProtos { */ long getNonceGroup(); - // optional uint64 nonce = 3 [default = 0]; /** * optional uint64 nonce = 3 [default = 0]; */ @@ -18270,36 +19175,30 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.DeleteNamespaceRequest} */ - public static final class DeleteNamespaceRequest extends - com.google.protobuf.GeneratedMessage - implements DeleteNamespaceRequestOrBuilder { + public static final class DeleteNamespaceRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.DeleteNamespaceRequest) + DeleteNamespaceRequestOrBuilder { // Use DeleteNamespaceRequest.newBuilder() to construct. - private DeleteNamespaceRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private DeleteNamespaceRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private DeleteNamespaceRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final DeleteNamespaceRequest defaultInstance; - public static DeleteNamespaceRequest getDefaultInstance() { - return defaultInstance; } - - public DeleteNamespaceRequest getDefaultInstanceForType() { - return defaultInstance; + private DeleteNamespaceRequest() { + namespaceName_ = ""; + nonceGroup_ = 0L; + nonce_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private DeleteNamespaceRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = 
com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -18319,8 +19218,9 @@ public final class MasterProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - namespaceName_ = input.readBytes(); + namespaceName_ = bs; break; } case 16: { @@ -18339,7 +19239,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -18350,32 +19250,16 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteNamespaceRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteNamespaceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public DeleteNamespaceRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new DeleteNamespaceRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string namespaceName = 1; public static final int NAMESPACENAME_FIELD_NUMBER = 1; - private 
java.lang.Object namespaceName_; + private volatile java.lang.Object namespaceName_; /** * required string namespaceName = 1; */ @@ -18416,7 +19300,6 @@ public final class MasterProtos { } } - // optional uint64 nonce_group = 2 [default = 0]; public static final int NONCE_GROUP_FIELD_NUMBER = 2; private long nonceGroup_; /** @@ -18432,7 +19315,6 @@ public final class MasterProtos { return nonceGroup_; } - // optional uint64 nonce = 3 [default = 0]; public static final int NONCE_FIELD_NUMBER = 3; private long nonce_; /** @@ -18448,15 +19330,11 @@ public final class MasterProtos { return nonce_; } - private void initFields() { - namespaceName_ = ""; - nonceGroup_ = 0L; - nonce_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasNamespaceName()) { memoizedIsInitialized = 0; @@ -18468,9 +19346,8 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getNamespaceNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, namespaceName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt64(2, nonceGroup_); @@ -18478,18 +19355,16 @@ public final class MasterProtos { if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeUInt64(3, nonce_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getNamespaceNameBytes()); + size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(1, namespaceName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream @@ -18499,19 +19374,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(3, nonce_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -18537,12 +19406,10 @@ public final class MasterProtos { result = result && (getNonce() == other.getNonce()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -18556,13 +19423,15 @@ public final class MasterProtos { } if (hasNonceGroup()) { hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonceGroup()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonceGroup()); } if (hasNonce()) { hash = (37 * hash) + NONCE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonce()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonce()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -18590,46 +19459,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return 
PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return 
Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -18637,14 +19517,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.DeleteNamespaceRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.DeleteNamespaceRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteNamespaceRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteNamespaceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -18657,18 +19538,15 @@ public final class MasterProtos { } 
private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); namespaceName_ = ""; @@ -18680,10 +19558,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteNamespaceRequest_descriptor; @@ -18722,6 +19596,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest) { return 
mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest)other); @@ -18744,13 +19644,13 @@ public final class MasterProtos { if (other.hasNonce()) { setNonce(other.getNonce()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasNamespaceName()) { - return false; } return true; @@ -18765,7 +19665,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -18775,7 +19675,6 @@ public final class MasterProtos { } private int bitField0_; - // required string namespaceName = 1; private java.lang.Object namespaceName_ = ""; /** * required string namespaceName = 1; @@ -18789,9 +19688,12 @@ public final class MasterProtos { public java.lang.String getNamespaceName() { java.lang.Object ref = namespaceName_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - namespaceName_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + namespaceName_ = s; + } return s; } else { return (java.lang.String) ref; @@ -18849,7 +19751,6 @@ public final class MasterProtos { return this; } - // optional uint64 nonce_group = 2 [default = 0]; private long nonceGroup_ ; /** * optional uint64 nonce_group = 2 [default = 0]; @@ -18882,7 +19783,6 @@ public final class MasterProtos { return this; } - // optional uint64 nonce = 3 [default = 0]; private long nonce_ ; /** * optional uint64 nonce = 3 [default = 0]; @@ 
-18914,22 +19814,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.DeleteNamespaceRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.DeleteNamespaceRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest DEFAULT_INSTANCE; static { - defaultInstance = new DeleteNamespaceRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public DeleteNamespaceRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DeleteNamespaceRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.DeleteNamespaceRequest) } - public interface DeleteNamespaceResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public 
interface DeleteNamespaceResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.DeleteNamespaceResponse) + com.google.protobuf.MessageOrBuilder { - // optional uint64 proc_id = 1; /** * optional uint64 proc_id = 1; */ @@ -18942,36 +19879,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.DeleteNamespaceResponse} */ - public static final class DeleteNamespaceResponse extends - com.google.protobuf.GeneratedMessage - implements DeleteNamespaceResponseOrBuilder { + public static final class DeleteNamespaceResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.DeleteNamespaceResponse) + DeleteNamespaceResponseOrBuilder { // Use DeleteNamespaceResponse.newBuilder() to construct. - private DeleteNamespaceResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private DeleteNamespaceResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private DeleteNamespaceResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final DeleteNamespaceResponse defaultInstance; - public static DeleteNamespaceResponse getDefaultInstance() { - return defaultInstance; - } - - public DeleteNamespaceResponse getDefaultInstanceForType() { - return defaultInstance; + private DeleteNamespaceResponse() { + procId_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private DeleteNamespaceResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; 
com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -19001,7 +19930,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -19012,30 +19941,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteNamespaceResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteNamespaceResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public DeleteNamespaceResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new DeleteNamespaceResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional uint64 proc_id = 1; public static final int PROC_ID_FIELD_NUMBER = 1; private long procId_; /** @@ -19051,13 +19964,11 @@ public final class MasterProtos { return procId_; } - private void initFields() { - procId_ = 0L; - } private byte memoizedIsInitialized = -1; 
public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -19065,16 +19976,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, procId_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -19082,19 +19991,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(1, procId_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -19110,12 +20013,10 @@ public final class MasterProtos { result = result && (getProcId() == other.getProcId()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -19125,9 +20026,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasProcId()) { hash = (37 * hash) + PROC_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getProcId()); + hash = (53 * hash) 
+ com.google.protobuf.Internal.hashLong( + getProcId()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -19155,46 +20057,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public 
static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -19202,14 +20115,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.DeleteNamespaceResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.DeleteNamespaceResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteNamespaceResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteNamespaceResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -19222,18 +20136,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); procId_ = 0L; @@ -19241,10 +20152,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteNamespaceResponse_descriptor; @@ -19275,6 +20182,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder 
setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse)other); @@ -19289,7 +20222,8 @@ public final class MasterProtos { if (other.hasProcId()) { setProcId(other.getProcId()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -19306,7 +20240,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -19316,7 +20250,6 @@ public final class MasterProtos { } private int bitField0_; - // optional uint64 proc_id = 1; private long procId_ ; /** * optional uint64 proc_id = 1; @@ -19348,22 +20281,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.DeleteNamespaceResponse) } + // 
@@protoc_insertion_point(class_scope:hbase.pb.DeleteNamespaceResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse DEFAULT_INSTANCE; static { - defaultInstance = new DeleteNamespaceResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public DeleteNamespaceResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DeleteNamespaceResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.DeleteNamespaceResponse) } - public interface ModifyNamespaceRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ModifyNamespaceRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ModifyNamespaceRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.NamespaceDescriptor namespaceDescriptor = 1; /** * required .hbase.pb.NamespaceDescriptor namespaceDescriptor = 1; */ @@ -19377,7 +20347,6 @@ public final class MasterProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder 
getNamespaceDescriptorOrBuilder(); - // optional uint64 nonce_group = 2 [default = 0]; /** * optional uint64 nonce_group = 2 [default = 0]; */ @@ -19387,7 +20356,6 @@ public final class MasterProtos { */ long getNonceGroup(); - // optional uint64 nonce = 3 [default = 0]; /** * optional uint64 nonce = 3 [default = 0]; */ @@ -19400,36 +20368,29 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.ModifyNamespaceRequest} */ - public static final class ModifyNamespaceRequest extends - com.google.protobuf.GeneratedMessage - implements ModifyNamespaceRequestOrBuilder { + public static final class ModifyNamespaceRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ModifyNamespaceRequest) + ModifyNamespaceRequestOrBuilder { // Use ModifyNamespaceRequest.newBuilder() to construct. - private ModifyNamespaceRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private ModifyNamespaceRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ModifyNamespaceRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ModifyNamespaceRequest defaultInstance; - public static ModifyNamespaceRequest getDefaultInstance() { - return defaultInstance; } - - public ModifyNamespaceRequest getDefaultInstanceForType() { - return defaultInstance; + private ModifyNamespaceRequest() { + nonceGroup_ = 0L; + nonce_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ModifyNamespaceRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - 
initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -19477,7 +20438,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -19488,30 +20449,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyNamespaceRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyNamespaceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ModifyNamespaceRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ModifyNamespaceRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.NamespaceDescriptor namespaceDescriptor = 1; public static final int NAMESPACEDESCRIPTOR_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor 
namespaceDescriptor_; /** @@ -19524,16 +20469,15 @@ public final class MasterProtos { * required .hbase.pb.NamespaceDescriptor namespaceDescriptor = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor() { - return namespaceDescriptor_; + return namespaceDescriptor_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance() : namespaceDescriptor_; } /** * required .hbase.pb.NamespaceDescriptor namespaceDescriptor = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder() { - return namespaceDescriptor_; + return namespaceDescriptor_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance() : namespaceDescriptor_; } - // optional uint64 nonce_group = 2 [default = 0]; public static final int NONCE_GROUP_FIELD_NUMBER = 2; private long nonceGroup_; /** @@ -19549,7 +20493,6 @@ public final class MasterProtos { return nonceGroup_; } - // optional uint64 nonce = 3 [default = 0]; public static final int NONCE_FIELD_NUMBER = 3; private long nonce_; /** @@ -19565,15 +20508,11 @@ public final class MasterProtos { return nonce_; } - private void initFields() { - namespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); - nonceGroup_ = 0L; - nonce_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasNamespaceDescriptor()) { memoizedIsInitialized = 0; @@ -19589,9 +20528,8 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 
0x00000001) == 0x00000001)) { - output.writeMessage(1, namespaceDescriptor_); + output.writeMessage(1, getNamespaceDescriptor()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt64(2, nonceGroup_); @@ -19599,18 +20537,17 @@ public final class MasterProtos { if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeUInt64(3, nonce_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, namespaceDescriptor_); + .computeMessageSize(1, getNamespaceDescriptor()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream @@ -19620,19 +20557,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(3, nonce_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -19658,12 +20589,10 @@ public final class MasterProtos { result = result && (getNonce() == other.getNonce()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -19677,13 +20606,15 @@ public final class MasterProtos { } if (hasNonceGroup()) { hash = (37 * hash) + 
NONCE_GROUP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonceGroup()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonceGroup()); } if (hasNonce()) { hash = (37 * hash) + NONCE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonce()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonce()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -19711,46 +20642,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -19758,14 +20700,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.ModifyNamespaceRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ModifyNamespaceRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyNamespaceRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyNamespaceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -19778,23 +20721,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getNamespaceDescriptorFieldBuilder(); } } - private static Builder create() { - 
return new Builder(); - } - public Builder clear() { super.clear(); if (namespaceDescriptorBuilder_ == null) { - namespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); + namespaceDescriptor_ = null; } else { namespaceDescriptorBuilder_.clear(); } @@ -19806,10 +20746,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyNamespaceRequest_descriptor; @@ -19852,6 +20788,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest)other); @@ -19872,17 +20834,16 @@ public final class MasterProtos { if (other.hasNonce()) { setNonce(other.getNonce()); 
} - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasNamespaceDescriptor()) { - return false; } if (!getNamespaceDescriptor().isInitialized()) { - return false; } return true; @@ -19897,7 +20858,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -19907,9 +20868,8 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.NamespaceDescriptor namespaceDescriptor = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor namespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor namespaceDescriptor_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> namespaceDescriptorBuilder_; /** * required .hbase.pb.NamespaceDescriptor namespaceDescriptor = 1; @@ -19922,7 +20882,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor() { if (namespaceDescriptorBuilder_ == null) { - return namespaceDescriptor_; + return namespaceDescriptor_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance() : namespaceDescriptor_; } else { return namespaceDescriptorBuilder_.getMessage(); } @@ -19963,6 +20923,7 @@ public final class MasterProtos { public Builder mergeNamespaceDescriptor(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor value) { if (namespaceDescriptorBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + namespaceDescriptor_ != null && namespaceDescriptor_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance()) { namespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.newBuilder(namespaceDescriptor_).mergeFrom(value).buildPartial(); @@ -19981,7 +20942,7 @@ public final class MasterProtos { */ public Builder clearNamespaceDescriptor() { if (namespaceDescriptorBuilder_ == null) { - namespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); + namespaceDescriptor_ = null; onChanged(); } else { namespaceDescriptorBuilder_.clear(); @@ -20004,19 +20965,20 @@ public final class MasterProtos { if (namespaceDescriptorBuilder_ != null) { return namespaceDescriptorBuilder_.getMessageOrBuilder(); } else { - return namespaceDescriptor_; + return namespaceDescriptor_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance() : namespaceDescriptor_; } } /** * required .hbase.pb.NamespaceDescriptor namespaceDescriptor = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> getNamespaceDescriptorFieldBuilder() { if (namespaceDescriptorBuilder_ == null) { - namespaceDescriptorBuilder_ = new com.google.protobuf.SingleFieldBuilder< + namespaceDescriptorBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder>( - namespaceDescriptor_, + getNamespaceDescriptor(), getParentForChildren(), isClean()); namespaceDescriptor_ = null; @@ -20024,7 +20986,6 @@ public final class MasterProtos { return namespaceDescriptorBuilder_; } - // optional uint64 nonce_group = 2 [default = 0]; private long nonceGroup_ ; /** * optional uint64 nonce_group = 2 [default = 0]; @@ -20057,7 +21018,6 @@ public final class MasterProtos { return this; } - // optional uint64 nonce = 3 [default = 0]; private long nonce_ ; /** * optional uint64 nonce = 3 [default = 0]; @@ -20089,22 +21049,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return 
super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ModifyNamespaceRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.ModifyNamespaceRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest DEFAULT_INSTANCE; static { - defaultInstance = new ModifyNamespaceRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ModifyNamespaceRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ModifyNamespaceRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ModifyNamespaceRequest) } - public interface ModifyNamespaceResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ModifyNamespaceResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ModifyNamespaceResponse) + com.google.protobuf.MessageOrBuilder { - // optional uint64 proc_id = 1; /** * optional uint64 proc_id = 1; */ @@ -20117,36 +21114,28 @@ public final class MasterProtos { /** * Protobuf type {@code 
hbase.pb.ModifyNamespaceResponse} */ - public static final class ModifyNamespaceResponse extends - com.google.protobuf.GeneratedMessage - implements ModifyNamespaceResponseOrBuilder { + public static final class ModifyNamespaceResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ModifyNamespaceResponse) + ModifyNamespaceResponseOrBuilder { // Use ModifyNamespaceResponse.newBuilder() to construct. - private ModifyNamespaceResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private ModifyNamespaceResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private ModifyNamespaceResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ModifyNamespaceResponse defaultInstance; - public static ModifyNamespaceResponse getDefaultInstance() { - return defaultInstance; - } - - public ModifyNamespaceResponse getDefaultInstanceForType() { - return defaultInstance; + private ModifyNamespaceResponse() { + procId_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ModifyNamespaceResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -20176,7 +21165,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + 
e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -20187,30 +21176,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyNamespaceResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyNamespaceResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ModifyNamespaceResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ModifyNamespaceResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional uint64 proc_id = 1; public static final int PROC_ID_FIELD_NUMBER = 1; private long procId_; /** @@ -20226,13 +21199,11 @@ public final class MasterProtos { return procId_; } - private void initFields() { - procId_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -20240,16 +21211,14 @@ public final class MasterProtos { public void 
writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, procId_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -20257,19 +21226,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(1, procId_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -20285,12 +21248,10 @@ public final class MasterProtos { result = result && (getProcId() == other.getProcId()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -20300,9 +21261,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasProcId()) { hash = (37 * hash) + PROC_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getProcId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getProcId()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -20330,46 +21292,57 @@ public final class MasterProtos { } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, 
extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -20377,14 +21350,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.ModifyNamespaceResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ModifyNamespaceResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyNamespaceResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyNamespaceResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -20397,18 +21371,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); procId_ = 0L; @@ -20416,10 +21387,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ModifyNamespaceResponse_descriptor; @@ -20450,6 +21417,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) 
super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse)other); @@ -20464,7 +21457,8 @@ public final class MasterProtos { if (other.hasProcId()) { setProcId(other.getProcId()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -20481,7 +21475,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -20491,7 +21485,6 @@ public final class MasterProtos { } private int bitField0_; - // optional uint64 proc_id = 1; private long procId_ ; /** * optional uint64 proc_id = 1; @@ -20523,22 +21516,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ModifyNamespaceResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.ModifyNamespaceResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse DEFAULT_INSTANCE; static { - defaultInstance = new ModifyNamespaceResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ModifyNamespaceResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ModifyNamespaceResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ModifyNamespaceResponse) } - public interface GetNamespaceDescriptorRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface GetNamespaceDescriptorRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GetNamespaceDescriptorRequest) + com.google.protobuf.MessageOrBuilder { - // required string namespaceName = 1; /** * required string namespaceName = 1; */ @@ -20556,36 +21586,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.GetNamespaceDescriptorRequest} */ - public static final class GetNamespaceDescriptorRequest extends - com.google.protobuf.GeneratedMessage - implements GetNamespaceDescriptorRequestOrBuilder { + public static final class GetNamespaceDescriptorRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetNamespaceDescriptorRequest) + 
GetNamespaceDescriptorRequestOrBuilder { // Use GetNamespaceDescriptorRequest.newBuilder() to construct. - private GetNamespaceDescriptorRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetNamespaceDescriptorRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private GetNamespaceDescriptorRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final GetNamespaceDescriptorRequest defaultInstance; - public static GetNamespaceDescriptorRequest getDefaultInstance() { - return defaultInstance; } - - public GetNamespaceDescriptorRequest getDefaultInstanceForType() { - return defaultInstance; + private GetNamespaceDescriptorRequest() { + namespaceName_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private GetNamespaceDescriptorRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -20605,8 +21627,9 @@ public final class MasterProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - namespaceName_ = input.readBytes(); + namespaceName_ = bs; break; } } @@ -20615,7 +21638,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); 
makeExtensionsImmutable(); @@ -20626,32 +21649,16 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetNamespaceDescriptorRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetNamespaceDescriptorRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetNamespaceDescriptorRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetNamespaceDescriptorRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string namespaceName = 1; public static final int NAMESPACENAME_FIELD_NUMBER = 1; - private java.lang.Object namespaceName_; + private volatile java.lang.Object namespaceName_; /** * required string namespaceName = 1; */ @@ -20692,13 +21699,11 @@ public final class MasterProtos { } } - private void initFields() { - namespaceName_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasNamespaceName()) { memoizedIsInitialized = 0; @@ -20710,36 
+21715,27 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getNamespaceNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, namespaceName_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getNamespaceNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, namespaceName_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -20755,12 +21751,10 @@ public final class MasterProtos { result = result && getNamespaceName() .equals(other.getNamespaceName()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -20772,7 +21766,7 @@ public final class MasterProtos { hash = (37 * hash) + NAMESPACENAME_FIELD_NUMBER; hash = (53 * hash) + getNamespaceName().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; 
} @@ -20800,46 +21794,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -20847,14 +21852,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.GetNamespaceDescriptorRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetNamespaceDescriptorRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetNamespaceDescriptorRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable 
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetNamespaceDescriptorRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -20867,18 +21873,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); namespaceName_ = ""; @@ -20886,10 +21889,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetNamespaceDescriptorRequest_descriptor; @@ -20920,6 +21919,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public 
Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest)other); @@ -20936,13 +21961,13 @@ public final class MasterProtos { namespaceName_ = other.namespaceName_; onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasNamespaceName()) { - return false; } return true; @@ -20957,7 +21982,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -20967,7 +21992,6 @@ public final class MasterProtos { } private int bitField0_; - // required string namespaceName = 1; private java.lang.Object namespaceName_ = ""; /** * required string namespaceName = 1; @@ -20981,9 +22005,12 @@ public final class MasterProtos { public java.lang.String getNamespaceName() { java.lang.Object ref = namespaceName_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - namespaceName_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + namespaceName_ = s; + } return s; } else { return (java.lang.String) ref; @@ -21040,22 +22067,59 @@ public final 
class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.GetNamespaceDescriptorRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.GetNamespaceDescriptorRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest DEFAULT_INSTANCE; static { - defaultInstance = new GetNamespaceDescriptorRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GetNamespaceDescriptorRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetNamespaceDescriptorRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.GetNamespaceDescriptorRequest) } - public interface GetNamespaceDescriptorResponseOrBuilder - extends 
com.google.protobuf.MessageOrBuilder { + public interface GetNamespaceDescriptorResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GetNamespaceDescriptorResponse) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.NamespaceDescriptor namespaceDescriptor = 1; /** * required .hbase.pb.NamespaceDescriptor namespaceDescriptor = 1; */ @@ -21072,36 +22136,27 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.GetNamespaceDescriptorResponse} */ - public static final class GetNamespaceDescriptorResponse extends - com.google.protobuf.GeneratedMessage - implements GetNamespaceDescriptorResponseOrBuilder { + public static final class GetNamespaceDescriptorResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetNamespaceDescriptorResponse) + GetNamespaceDescriptorResponseOrBuilder { // Use GetNamespaceDescriptorResponse.newBuilder() to construct. - private GetNamespaceDescriptorResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetNamespaceDescriptorResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private GetNamespaceDescriptorResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final GetNamespaceDescriptorResponse defaultInstance; - public static GetNamespaceDescriptorResponse getDefaultInstance() { - return defaultInstance; + private GetNamespaceDescriptorResponse() { } - public GetNamespaceDescriptorResponse getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private GetNamespaceDescriptorResponse( com.google.protobuf.CodedInputStream 
input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -21139,7 +22194,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -21150,30 +22205,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetNamespaceDescriptorResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetNamespaceDescriptorResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetNamespaceDescriptorResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetNamespaceDescriptorResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.NamespaceDescriptor 
namespaceDescriptor = 1; public static final int NAMESPACEDESCRIPTOR_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor namespaceDescriptor_; /** @@ -21186,22 +22225,20 @@ public final class MasterProtos { * required .hbase.pb.NamespaceDescriptor namespaceDescriptor = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor() { - return namespaceDescriptor_; + return namespaceDescriptor_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance() : namespaceDescriptor_; } /** * required .hbase.pb.NamespaceDescriptor namespaceDescriptor = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder() { - return namespaceDescriptor_; + return namespaceDescriptor_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance() : namespaceDescriptor_; } - private void initFields() { - namespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasNamespaceDescriptor()) { memoizedIsInitialized = 0; @@ -21217,36 +22254,28 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, namespaceDescriptor_); + output.writeMessage(1, getNamespaceDescriptor()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int 
getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, namespaceDescriptor_); + .computeMessageSize(1, getNamespaceDescriptor()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -21262,12 +22291,10 @@ public final class MasterProtos { result = result && getNamespaceDescriptor() .equals(other.getNamespaceDescriptor()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -21279,7 +22306,7 @@ public final class MasterProtos { hash = (37 * hash) + NAMESPACEDESCRIPTOR_FIELD_NUMBER; hash = (53 * hash) + getNamespaceDescriptor().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -21307,46 +22334,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parseFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder 
newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -21354,14 +22392,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.GetNamespaceDescriptorResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetNamespaceDescriptorResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetNamespaceDescriptorResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetNamespaceDescriptorResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -21374,23 +22413,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + 
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getNamespaceDescriptorFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (namespaceDescriptorBuilder_ == null) { - namespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); + namespaceDescriptor_ = null; } else { namespaceDescriptorBuilder_.clear(); } @@ -21398,10 +22434,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetNamespaceDescriptorResponse_descriptor; @@ -21436,6 +22468,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } 
public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse)other); @@ -21450,17 +22508,16 @@ public final class MasterProtos { if (other.hasNamespaceDescriptor()) { mergeNamespaceDescriptor(other.getNamespaceDescriptor()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasNamespaceDescriptor()) { - return false; } if (!getNamespaceDescriptor().isInitialized()) { - return false; } return true; @@ -21475,7 +22532,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -21485,9 +22542,8 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.NamespaceDescriptor namespaceDescriptor = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor namespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor namespaceDescriptor_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> namespaceDescriptorBuilder_; /** * required .hbase.pb.NamespaceDescriptor namespaceDescriptor = 1; @@ -21500,7 +22556,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor() { if (namespaceDescriptorBuilder_ == null) { - return namespaceDescriptor_; + return namespaceDescriptor_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance() : namespaceDescriptor_; } else { return namespaceDescriptorBuilder_.getMessage(); } @@ -21541,6 +22597,7 @@ public final class MasterProtos { public Builder mergeNamespaceDescriptor(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor value) { if (namespaceDescriptorBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + namespaceDescriptor_ != null && namespaceDescriptor_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance()) { namespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.newBuilder(namespaceDescriptor_).mergeFrom(value).buildPartial(); @@ -21559,7 +22616,7 @@ public final class MasterProtos { */ public Builder clearNamespaceDescriptor() { if (namespaceDescriptorBuilder_ == null) { - namespaceDescriptor_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); + namespaceDescriptor_ = null; onChanged(); } else { namespaceDescriptorBuilder_.clear(); @@ -21582,73 +22639,103 @@ public final class MasterProtos { if (namespaceDescriptorBuilder_ != null) { return namespaceDescriptorBuilder_.getMessageOrBuilder(); } else { - return namespaceDescriptor_; + return namespaceDescriptor_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance() : namespaceDescriptor_; } } /** * required .hbase.pb.NamespaceDescriptor namespaceDescriptor = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> getNamespaceDescriptorFieldBuilder() { if (namespaceDescriptorBuilder_ == null) { - namespaceDescriptorBuilder_ = new com.google.protobuf.SingleFieldBuilder< + namespaceDescriptorBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder>( - namespaceDescriptor_, + getNamespaceDescriptor(), getParentForChildren(), isClean()); namespaceDescriptor_ = null; } return namespaceDescriptorBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.GetNamespaceDescriptorResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.GetNamespaceDescriptorResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse DEFAULT_INSTANCE; static { - defaultInstance = new GetNamespaceDescriptorResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GetNamespaceDescriptorResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetNamespaceDescriptorResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.GetNamespaceDescriptorResponse) } - public interface ListNamespaceDescriptorsRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ListNamespaceDescriptorsRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ListNamespaceDescriptorsRequest) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.ListNamespaceDescriptorsRequest} */ - public static final class ListNamespaceDescriptorsRequest extends - com.google.protobuf.GeneratedMessage - implements ListNamespaceDescriptorsRequestOrBuilder { + public static final class ListNamespaceDescriptorsRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ListNamespaceDescriptorsRequest) + ListNamespaceDescriptorsRequestOrBuilder { // Use ListNamespaceDescriptorsRequest.newBuilder() to construct. 
- private ListNamespaceDescriptorsRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private ListNamespaceDescriptorsRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ListNamespaceDescriptorsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ListNamespaceDescriptorsRequest defaultInstance; - public static ListNamespaceDescriptorsRequest getDefaultInstance() { - return defaultInstance; } - - public ListNamespaceDescriptorsRequest getDefaultInstanceForType() { - return defaultInstance; + private ListNamespaceDescriptorsRequest() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ListNamespaceDescriptorsRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -21672,7 +22759,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -21683,34 +22770,18 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListNamespaceDescriptorsRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListNamespaceDescriptorsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ListNamespaceDescriptorsRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ListNamespaceDescriptorsRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -21718,29 +22789,21 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws 
java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -21751,12 +22814,10 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -21764,7 +22825,7 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -21792,46 +22853,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return 
com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -21839,14 +22911,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.ListNamespaceDescriptorsRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ListNamespaceDescriptorsRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListNamespaceDescriptorsRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListNamespaceDescriptorsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -21859,27 +22932,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder 
create() { - return new Builder(); - } - public Builder clear() { super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListNamespaceDescriptorsRequest_descriptor; @@ -21903,6 +22969,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest)other); @@ -21914,7 +23006,8 @@ public final class MasterProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.getDefaultInstance()) return this; - 
this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -21931,7 +23024,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -21939,22 +23032,59 @@ public final class MasterProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ListNamespaceDescriptorsRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.ListNamespaceDescriptorsRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest DEFAULT_INSTANCE; static { - defaultInstance = new ListNamespaceDescriptorsRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ListNamespaceDescriptorsRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return new ListNamespaceDescriptorsRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ListNamespaceDescriptorsRequest) } - public interface ListNamespaceDescriptorsResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ListNamespaceDescriptorsResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ListNamespaceDescriptorsResponse) + com.google.protobuf.MessageOrBuilder { - // repeated .hbase.pb.NamespaceDescriptor namespaceDescriptor = 1; /** * repeated .hbase.pb.NamespaceDescriptor namespaceDescriptor = 1; */ @@ -21982,36 +23112,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.ListNamespaceDescriptorsResponse} */ - public static final class ListNamespaceDescriptorsResponse extends - com.google.protobuf.GeneratedMessage - implements ListNamespaceDescriptorsResponseOrBuilder { + public static final class ListNamespaceDescriptorsResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ListNamespaceDescriptorsResponse) + ListNamespaceDescriptorsResponseOrBuilder { // Use ListNamespaceDescriptorsResponse.newBuilder() to construct. 
- private ListNamespaceDescriptorsResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private ListNamespaceDescriptorsResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ListNamespaceDescriptorsResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ListNamespaceDescriptorsResponse defaultInstance; - public static ListNamespaceDescriptorsResponse getDefaultInstance() { - return defaultInstance; } - - public ListNamespaceDescriptorsResponse getDefaultInstanceForType() { - return defaultInstance; + private ListNamespaceDescriptorsResponse() { + namespaceDescriptor_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ListNamespaceDescriptorsResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -22035,7 +23157,8 @@ public final class MasterProtos { namespaceDescriptor_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } - namespaceDescriptor_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.PARSER, extensionRegistry)); + namespaceDescriptor_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.PARSER, extensionRegistry)); break; } } @@ -22044,7 +23167,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { namespaceDescriptor_ = java.util.Collections.unmodifiableList(namespaceDescriptor_); @@ -22058,29 +23181,13 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListNamespaceDescriptorsResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListNamespaceDescriptorsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ListNamespaceDescriptorsResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ListNamespaceDescriptorsResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - // repeated .hbase.pb.NamespaceDescriptor namespaceDescriptor = 1; public static final int NAMESPACEDESCRIPTOR_FIELD_NUMBER = 1; private java.util.List namespaceDescriptor_; /** @@ -22116,13 +23223,11 @@ public final class MasterProtos { return namespaceDescriptor_.get(index); } - private void initFields() { - namespaceDescriptor_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public 
final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; for (int i = 0; i < getNamespaceDescriptorCount(); i++) { if (!getNamespaceDescriptor(i).isInitialized()) { @@ -22136,16 +23241,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < namespaceDescriptor_.size(); i++) { output.writeMessage(1, namespaceDescriptor_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -22153,19 +23256,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, namespaceDescriptor_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -22178,12 +23275,10 @@ public final class MasterProtos { boolean result = true; result = result && getNamespaceDescriptorList() .equals(other.getNamespaceDescriptorList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -22195,7 +23290,7 @@ public final class MasterProtos { hash = (37 * 
hash) + NAMESPACEDESCRIPTOR_FIELD_NUMBER; hash = (53 * hash) + getNamespaceDescriptorList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -22223,46 +23318,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + 
return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -22270,14 +23376,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.ListNamespaceDescriptorsResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ListNamespaceDescriptorsResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListNamespaceDescriptorsResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListNamespaceDescriptorsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -22290,19 +23397,16 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { 
getNamespaceDescriptorFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (namespaceDescriptorBuilder_ == null) { @@ -22314,10 +23418,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListNamespaceDescriptorsResponse_descriptor; @@ -22351,6 +23451,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse)other); @@ -22381,21 +23507,21 @@ public final class MasterProtos { namespaceDescriptor_ = other.namespaceDescriptor_; bitField0_ = (bitField0_ & ~0x00000001); namespaceDescriptorBuilder_ = - 
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getNamespaceDescriptorFieldBuilder() : null; } else { namespaceDescriptorBuilder_.addAllMessages(other.namespaceDescriptor_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getNamespaceDescriptorCount(); i++) { if (!getNamespaceDescriptor(i).isInitialized()) { - return false; } } @@ -22411,7 +23537,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -22421,7 +23547,6 @@ public final class MasterProtos { } private int bitField0_; - // repeated .hbase.pb.NamespaceDescriptor namespaceDescriptor = 1; private java.util.List namespaceDescriptor_ = java.util.Collections.emptyList(); private void ensureNamespaceDescriptorIsMutable() { @@ -22431,7 +23556,7 @@ public final class MasterProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> namespaceDescriptorBuilder_; /** @@ -22563,7 +23688,8 @@ public final class MasterProtos { java.lang.Iterable values) { if (namespaceDescriptorBuilder_ == null) { ensureNamespaceDescriptorIsMutable(); - super.addAll(values, namespaceDescriptor_); + 
com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, namespaceDescriptor_); onChanged(); } else { namespaceDescriptorBuilder_.addAllMessages(values); @@ -22646,11 +23772,11 @@ public final class MasterProtos { getNamespaceDescriptorBuilderList() { return getNamespaceDescriptorFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> getNamespaceDescriptorFieldBuilder() { if (namespaceDescriptorBuilder_ == null) { - namespaceDescriptorBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + namespaceDescriptorBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder>( namespaceDescriptor_, ((bitField0_ & 0x00000001) == 0x00000001), @@ -22660,22 +23786,59 @@ public final class MasterProtos { } return namespaceDescriptorBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ListNamespaceDescriptorsResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.ListNamespaceDescriptorsResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse DEFAULT_INSTANCE; 
static { - defaultInstance = new ListNamespaceDescriptorsResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ListNamespaceDescriptorsResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ListNamespaceDescriptorsResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ListNamespaceDescriptorsResponse) } - public interface ListTableDescriptorsByNamespaceRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ListTableDescriptorsByNamespaceRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ListTableDescriptorsByNamespaceRequest) + com.google.protobuf.MessageOrBuilder { - // required string namespaceName = 1; /** * required string namespaceName = 1; */ @@ -22693,36 +23856,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.ListTableDescriptorsByNamespaceRequest} */ - public static final class ListTableDescriptorsByNamespaceRequest extends - com.google.protobuf.GeneratedMessage - implements ListTableDescriptorsByNamespaceRequestOrBuilder 
{ + public static final class ListTableDescriptorsByNamespaceRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ListTableDescriptorsByNamespaceRequest) + ListTableDescriptorsByNamespaceRequestOrBuilder { // Use ListTableDescriptorsByNamespaceRequest.newBuilder() to construct. - private ListTableDescriptorsByNamespaceRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private ListTableDescriptorsByNamespaceRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private ListTableDescriptorsByNamespaceRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ListTableDescriptorsByNamespaceRequest defaultInstance; - public static ListTableDescriptorsByNamespaceRequest getDefaultInstance() { - return defaultInstance; - } - - public ListTableDescriptorsByNamespaceRequest getDefaultInstanceForType() { - return defaultInstance; + private ListTableDescriptorsByNamespaceRequest() { + namespaceName_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ListTableDescriptorsByNamespaceRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -22742,8 +23897,9 @@ public final class MasterProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - namespaceName_ = input.readBytes(); + namespaceName_ = bs; break; } } @@ -22752,7 +23908,7 @@ 
public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -22763,32 +23919,16 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListTableDescriptorsByNamespaceRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListTableDescriptorsByNamespaceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ListTableDescriptorsByNamespaceRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ListTableDescriptorsByNamespaceRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string namespaceName = 1; public static final int NAMESPACENAME_FIELD_NUMBER = 1; - private java.lang.Object namespaceName_; + private volatile java.lang.Object namespaceName_; /** * required string namespaceName = 1; */ @@ -22829,13 +23969,11 @@ public final class MasterProtos { } } - private void 
initFields() { - namespaceName_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasNamespaceName()) { memoizedIsInitialized = 0; @@ -22847,36 +23985,27 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getNamespaceNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, namespaceName_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getNamespaceNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, namespaceName_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -22892,12 +24021,10 @@ public final class MasterProtos { result = result && getNamespaceName() .equals(other.getNamespaceName()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; 
@java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -22909,7 +24036,7 @@ public final class MasterProtos { hash = (37 * hash) + NAMESPACENAME_FIELD_NUMBER; hash = (53 * hash) + getNamespaceName().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -22937,46 +24064,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -22984,14 +24122,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.ListTableDescriptorsByNamespaceRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ListTableDescriptorsByNamespaceRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListTableDescriptorsByNamespaceRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListTableDescriptorsByNamespaceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -23004,18 +24143,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + 
.alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); namespaceName_ = ""; @@ -23023,10 +24159,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListTableDescriptorsByNamespaceRequest_descriptor; @@ -23057,6 +24189,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest)other); @@ -23073,13 +24231,13 @@ public final class MasterProtos { namespaceName_ = other.namespaceName_; onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + 
onChanged(); return this; } public final boolean isInitialized() { if (!hasNamespaceName()) { - return false; } return true; @@ -23094,7 +24252,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -23104,7 +24262,6 @@ public final class MasterProtos { } private int bitField0_; - // required string namespaceName = 1; private java.lang.Object namespaceName_ = ""; /** * required string namespaceName = 1; @@ -23118,9 +24275,12 @@ public final class MasterProtos { public java.lang.String getNamespaceName() { java.lang.Object ref = namespaceName_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - namespaceName_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + namespaceName_ = s; + } return s; } else { return (java.lang.String) ref; @@ -23177,22 +24337,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ListTableDescriptorsByNamespaceRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.ListTableDescriptorsByNamespaceRequest) + private static final 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest DEFAULT_INSTANCE; static { - defaultInstance = new ListTableDescriptorsByNamespaceRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ListTableDescriptorsByNamespaceRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ListTableDescriptorsByNamespaceRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ListTableDescriptorsByNamespaceRequest) } - public interface ListTableDescriptorsByNamespaceResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ListTableDescriptorsByNamespaceResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ListTableDescriptorsByNamespaceResponse) + com.google.protobuf.MessageOrBuilder { - // repeated .hbase.pb.TableSchema tableSchema = 1; /** * repeated .hbase.pb.TableSchema tableSchema = 1; */ @@ -23220,36 +24417,28 @@ public final class MasterProtos { /** * Protobuf type {@code 
hbase.pb.ListTableDescriptorsByNamespaceResponse} */ - public static final class ListTableDescriptorsByNamespaceResponse extends - com.google.protobuf.GeneratedMessage - implements ListTableDescriptorsByNamespaceResponseOrBuilder { + public static final class ListTableDescriptorsByNamespaceResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ListTableDescriptorsByNamespaceResponse) + ListTableDescriptorsByNamespaceResponseOrBuilder { // Use ListTableDescriptorsByNamespaceResponse.newBuilder() to construct. - private ListTableDescriptorsByNamespaceResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private ListTableDescriptorsByNamespaceResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ListTableDescriptorsByNamespaceResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ListTableDescriptorsByNamespaceResponse defaultInstance; - public static ListTableDescriptorsByNamespaceResponse getDefaultInstance() { - return defaultInstance; } - - public ListTableDescriptorsByNamespaceResponse getDefaultInstanceForType() { - return defaultInstance; + private ListTableDescriptorsByNamespaceResponse() { + tableSchema_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ListTableDescriptorsByNamespaceResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = 
com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -23273,7 +24462,8 @@ public final class MasterProtos { tableSchema_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } - tableSchema_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.PARSER, extensionRegistry)); + tableSchema_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.PARSER, extensionRegistry)); break; } } @@ -23282,7 +24472,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { tableSchema_ = java.util.Collections.unmodifiableList(tableSchema_); @@ -23296,29 +24486,13 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListTableDescriptorsByNamespaceResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListTableDescriptorsByNamespaceResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ListTableDescriptorsByNamespaceResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return new ListTableDescriptorsByNamespaceResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - // repeated .hbase.pb.TableSchema tableSchema = 1; public static final int TABLESCHEMA_FIELD_NUMBER = 1; private java.util.List tableSchema_; /** @@ -23354,13 +24528,11 @@ public final class MasterProtos { return tableSchema_.get(index); } - private void initFields() { - tableSchema_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; for (int i = 0; i < getTableSchemaCount(); i++) { if (!getTableSchema(i).isInitialized()) { @@ -23374,16 +24546,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < tableSchema_.size(); i++) { output.writeMessage(1, tableSchema_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -23391,19 +24561,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, tableSchema_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean 
equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -23416,12 +24580,10 @@ public final class MasterProtos { boolean result = true; result = result && getTableSchemaList() .equals(other.getTableSchemaList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -23433,7 +24595,7 @@ public final class MasterProtos { hash = (37 * hash) + TABLESCHEMA_FIELD_NUMBER; hash = (53 * hash) + getTableSchemaList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -23461,46 +24623,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -23508,14 +24681,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.ListTableDescriptorsByNamespaceResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ListTableDescriptorsByNamespaceResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListTableDescriptorsByNamespaceResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListTableDescriptorsByNamespaceResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -23528,19 +24702,16 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + 
.alwaysUseFieldBuilders) { getTableSchemaFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (tableSchemaBuilder_ == null) { @@ -23552,10 +24723,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListTableDescriptorsByNamespaceResponse_descriptor; @@ -23589,6 +24756,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse)other); @@ -23619,21 +24812,21 @@ public final class MasterProtos { tableSchema_ = other.tableSchema_; bitField0_ = (bitField0_ & ~0x00000001); tableSchemaBuilder_ = - 
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getTableSchemaFieldBuilder() : null; } else { tableSchemaBuilder_.addAllMessages(other.tableSchema_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getTableSchemaCount(); i++) { if (!getTableSchema(i).isInitialized()) { - return false; } } @@ -23649,7 +24842,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -23659,7 +24852,6 @@ public final class MasterProtos { } private int bitField0_; - // repeated .hbase.pb.TableSchema tableSchema = 1; private java.util.List tableSchema_ = java.util.Collections.emptyList(); private void ensureTableSchemaIsMutable() { @@ -23669,7 +24861,7 @@ public final class MasterProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_; /** @@ -23801,7 +24993,8 @@ public final class MasterProtos { java.lang.Iterable values) { if (tableSchemaBuilder_ == null) { ensureTableSchemaIsMutable(); - super.addAll(values, tableSchema_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, tableSchema_); onChanged(); } else { tableSchemaBuilder_.addAllMessages(values); @@ 
-23884,11 +25077,11 @@ public final class MasterProtos { getTableSchemaBuilderList() { return getTableSchemaFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> getTableSchemaFieldBuilder() { if (tableSchemaBuilder_ == null) { - tableSchemaBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + tableSchemaBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>( tableSchema_, ((bitField0_ & 0x00000001) == 0x00000001), @@ -23898,22 +25091,59 @@ public final class MasterProtos { } return tableSchemaBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ListTableDescriptorsByNamespaceResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.ListTableDescriptorsByNamespaceResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse DEFAULT_INSTANCE; static { - defaultInstance = new ListTableDescriptorsByNamespaceResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse(); + } + + public 
static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ListTableDescriptorsByNamespaceResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ListTableDescriptorsByNamespaceResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ListTableDescriptorsByNamespaceResponse) } - public interface ListTableNamesByNamespaceRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ListTableNamesByNamespaceRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ListTableNamesByNamespaceRequest) + com.google.protobuf.MessageOrBuilder { - // required string namespaceName = 1; /** * required string namespaceName = 1; */ @@ -23931,36 +25161,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.ListTableNamesByNamespaceRequest} */ - public static final class ListTableNamesByNamespaceRequest extends - com.google.protobuf.GeneratedMessage - implements ListTableNamesByNamespaceRequestOrBuilder { + public static final class ListTableNamesByNamespaceRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ListTableNamesByNamespaceRequest) + 
ListTableNamesByNamespaceRequestOrBuilder { // Use ListTableNamesByNamespaceRequest.newBuilder() to construct. - private ListTableNamesByNamespaceRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private ListTableNamesByNamespaceRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private ListTableNamesByNamespaceRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ListTableNamesByNamespaceRequest defaultInstance; - public static ListTableNamesByNamespaceRequest getDefaultInstance() { - return defaultInstance; - } - - public ListTableNamesByNamespaceRequest getDefaultInstanceForType() { - return defaultInstance; + private ListTableNamesByNamespaceRequest() { + namespaceName_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ListTableNamesByNamespaceRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -23980,8 +25202,9 @@ public final class MasterProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - namespaceName_ = input.readBytes(); + namespaceName_ = bs; break; } } @@ -23990,7 +25213,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = 
unknownFields.build(); makeExtensionsImmutable(); @@ -24001,32 +25224,16 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListTableNamesByNamespaceRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListTableNamesByNamespaceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ListTableNamesByNamespaceRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ListTableNamesByNamespaceRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string namespaceName = 1; public static final int NAMESPACENAME_FIELD_NUMBER = 1; - private java.lang.Object namespaceName_; + private volatile java.lang.Object namespaceName_; /** * required string namespaceName = 1; */ @@ -24067,13 +25274,11 @@ public final class MasterProtos { } } - private void initFields() { - namespaceName_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasNamespaceName()) { 
memoizedIsInitialized = 0; @@ -24085,36 +25290,27 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getNamespaceNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, namespaceName_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getNamespaceNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, namespaceName_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -24130,12 +25326,10 @@ public final class MasterProtos { result = result && getNamespaceName() .equals(other.getNamespaceName()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -24147,7 +25341,7 @@ public final class MasterProtos { hash = (37 * hash) + NAMESPACENAME_FIELD_NUMBER; hash = (53 * hash) + getNamespaceName().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + 
unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -24175,46 +25369,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -24222,14 +25427,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.ListTableNamesByNamespaceRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ListTableNamesByNamespaceRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListTableNamesByNamespaceRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListTableNamesByNamespaceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -24242,18 +25448,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); namespaceName_ = ""; @@ -24261,10 +25464,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListTableNamesByNamespaceRequest_descriptor; @@ -24295,6 +25494,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) 
super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest)other); @@ -24311,13 +25536,13 @@ public final class MasterProtos { namespaceName_ = other.namespaceName_; onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasNamespaceName()) { - return false; } return true; @@ -24332,7 +25557,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -24342,7 +25567,6 @@ public final class MasterProtos { } private int bitField0_; - // required string namespaceName = 1; private java.lang.Object namespaceName_ = ""; /** * required string namespaceName = 1; @@ -24356,9 +25580,12 @@ public final class MasterProtos { public java.lang.String getNamespaceName() { java.lang.Object ref = namespaceName_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - namespaceName_ = s; + 
com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + namespaceName_ = s; + } return s; } else { return (java.lang.String) ref; @@ -24415,22 +25642,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ListTableNamesByNamespaceRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.ListTableNamesByNamespaceRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest DEFAULT_INSTANCE; static { - defaultInstance = new ListTableNamesByNamespaceRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ListTableNamesByNamespaceRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ListTableNamesByNamespaceRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ListTableNamesByNamespaceRequest) } - public interface ListTableNamesByNamespaceResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ListTableNamesByNamespaceResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ListTableNamesByNamespaceResponse) + com.google.protobuf.MessageOrBuilder { - // repeated .hbase.pb.TableName tableName = 1; /** * repeated .hbase.pb.TableName tableName = 1; */ @@ -24458,36 +25722,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.ListTableNamesByNamespaceResponse} */ - public static final class ListTableNamesByNamespaceResponse extends - com.google.protobuf.GeneratedMessage - implements ListTableNamesByNamespaceResponseOrBuilder { + public static final class ListTableNamesByNamespaceResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ListTableNamesByNamespaceResponse) + ListTableNamesByNamespaceResponseOrBuilder { // Use ListTableNamesByNamespaceResponse.newBuilder() to construct. 
- private ListTableNamesByNamespaceResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private ListTableNamesByNamespaceResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private ListTableNamesByNamespaceResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ListTableNamesByNamespaceResponse defaultInstance; - public static ListTableNamesByNamespaceResponse getDefaultInstance() { - return defaultInstance; - } - - public ListTableNamesByNamespaceResponse getDefaultInstanceForType() { - return defaultInstance; + private ListTableNamesByNamespaceResponse() { + tableName_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ListTableNamesByNamespaceResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -24511,7 +25767,8 @@ public final class MasterProtos { tableName_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } - tableName_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry)); + tableName_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry)); break; } } @@ -24520,7 +25777,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - 
e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { tableName_ = java.util.Collections.unmodifiableList(tableName_); @@ -24534,29 +25791,13 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListTableNamesByNamespaceResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListTableNamesByNamespaceResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ListTableNamesByNamespaceResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ListTableNamesByNamespaceResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - // repeated .hbase.pb.TableName tableName = 1; public static final int TABLENAME_FIELD_NUMBER = 1; private java.util.List tableName_; /** @@ -24592,13 +25833,11 @@ public final class MasterProtos { return tableName_.get(index); } - private void initFields() { - tableName_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + 
if (isInitialized == 1) return true; + if (isInitialized == 0) return false; for (int i = 0; i < getTableNameCount(); i++) { if (!getTableName(i).isInitialized()) { @@ -24612,16 +25851,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < tableName_.size(); i++) { output.writeMessage(1, tableName_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -24629,19 +25866,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, tableName_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -24654,12 +25885,10 @@ public final class MasterProtos { boolean result = true; result = result && getTableNameList() .equals(other.getTableNameList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -24671,7 +25900,7 @@ public final class MasterProtos { hash = (37 * hash) + TABLENAME_FIELD_NUMBER; hash = (53 * hash) + getTableNameList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode 
= hash; return hash; } @@ -24699,46 +25928,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parseFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -24746,14 +25986,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.ListTableNamesByNamespaceResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ListTableNamesByNamespaceResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListTableNamesByNamespaceResponse_descriptor; } 
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListTableNamesByNamespaceResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -24766,19 +26007,16 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTableNameFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { @@ -24790,10 +26028,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListTableNamesByNamespaceResponse_descriptor; @@ -24827,6 +26061,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor 
field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse)other); @@ -24857,21 +26117,21 @@ public final class MasterProtos { tableName_ = other.tableName_; bitField0_ = (bitField0_ & ~0x00000001); tableNameBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getTableNameFieldBuilder() : null; } else { tableNameBuilder_.addAllMessages(other.tableName_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getTableNameCount(); i++) { if (!getTableName(i).isInitialized()) { - return false; } } @@ -24887,7 +26147,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -24897,7 +26157,6 @@ public final class MasterProtos { } private int bitField0_; - // repeated .hbase.pb.TableName tableName = 1; private java.util.List tableName_ = java.util.Collections.emptyList(); private void ensureTableNameIsMutable() { @@ -24907,7 +26166,7 @@ public final class MasterProtos { } } - 
private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** @@ -25039,7 +26298,8 @@ public final class MasterProtos { java.lang.Iterable values) { if (tableNameBuilder_ == null) { ensureTableNameIsMutable(); - super.addAll(values, tableName_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, tableName_); onChanged(); } else { tableNameBuilder_.addAllMessages(values); @@ -25122,11 +26382,11 @@ public final class MasterProtos { getTableNameBuilderList() { return getTableNameFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { - tableNameBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + tableNameBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( tableName_, ((bitField0_ & 0x00000001) == 0x00000001), @@ -25136,54 +26396,83 @@ public final class MasterProtos { } return tableNameBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final 
com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ListTableNamesByNamespaceResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.ListTableNamesByNamespaceResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse DEFAULT_INSTANCE; static { - defaultInstance = new ListTableNamesByNamespaceResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ListTableNamesByNamespaceResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ListTableNamesByNamespaceResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ListTableNamesByNamespaceResponse) } - public interface ShutdownRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ShutdownRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ShutdownRequest) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code 
hbase.pb.ShutdownRequest} */ - public static final class ShutdownRequest extends - com.google.protobuf.GeneratedMessage - implements ShutdownRequestOrBuilder { + public static final class ShutdownRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ShutdownRequest) + ShutdownRequestOrBuilder { // Use ShutdownRequest.newBuilder() to construct. - private ShutdownRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private ShutdownRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private ShutdownRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ShutdownRequest defaultInstance; - public static ShutdownRequest getDefaultInstance() { - return defaultInstance; - } - - public ShutdownRequest getDefaultInstanceForType() { - return defaultInstance; + private ShutdownRequest() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ShutdownRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -25207,7 +26496,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -25218,34 +26507,18 @@ public final class MasterProtos { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ShutdownRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ShutdownRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ShutdownRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ShutdownRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -25253,29 +26526,21 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += 
unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -25286,12 +26551,10 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -25299,7 +26562,7 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -25327,46 +26590,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest 
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -25374,14 +26648,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.ShutdownRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ShutdownRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ShutdownRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ShutdownRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -25394,27 +26669,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); return this; } - 
public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ShutdownRequest_descriptor; @@ -25438,6 +26706,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest)other); @@ -25449,7 +26743,8 @@ public final class MasterProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -25466,7 +26761,7 @@ public final class MasterProtos { parsedMessage = 
PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -25474,54 +26769,83 @@ public final class MasterProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ShutdownRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.ShutdownRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest DEFAULT_INSTANCE; static { - defaultInstance = new ShutdownRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ShutdownRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ShutdownRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ShutdownRequest) } - public interface ShutdownResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ShutdownResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ShutdownResponse) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.ShutdownResponse} */ - public static final class ShutdownResponse extends - com.google.protobuf.GeneratedMessage - implements ShutdownResponseOrBuilder { + public static final class ShutdownResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ShutdownResponse) + ShutdownResponseOrBuilder { // Use ShutdownResponse.newBuilder() to construct. - private ShutdownResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private ShutdownResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ShutdownResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ShutdownResponse defaultInstance; - public static ShutdownResponse getDefaultInstance() { - return defaultInstance; } - - public ShutdownResponse getDefaultInstanceForType() { - return defaultInstance; + private ShutdownResponse() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ShutdownResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); 
com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -25545,7 +26869,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -25556,34 +26880,18 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ShutdownResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ShutdownResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ShutdownResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ShutdownResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ 
-25591,29 +26899,21 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -25624,12 +26924,10 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -25637,7 +26935,7 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -25665,46 +26963,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder 
newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -25712,14 +27021,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.ShutdownResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ShutdownResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ShutdownResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ShutdownResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -25732,27 +27042,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { 
super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ShutdownResponse_descriptor; @@ -25776,6 +27079,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse)other); @@ -25787,7 +27116,8 @@ public final class MasterProtos { public Builder 
mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -25804,7 +27134,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -25812,54 +27142,83 @@ public final class MasterProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ShutdownResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.ShutdownResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse DEFAULT_INSTANCE; static { - defaultInstance = new ShutdownResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ShutdownResponse parsePartialFrom( + com.google.protobuf.CodedInputStream 
input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ShutdownResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ShutdownResponse) } - public interface StopMasterRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface StopMasterRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.StopMasterRequest) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.StopMasterRequest} */ - public static final class StopMasterRequest extends - com.google.protobuf.GeneratedMessage - implements StopMasterRequestOrBuilder { + public static final class StopMasterRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.StopMasterRequest) + StopMasterRequestOrBuilder { // Use StopMasterRequest.newBuilder() to construct. 
- private StopMasterRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private StopMasterRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private StopMasterRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final StopMasterRequest defaultInstance; - public static StopMasterRequest getDefaultInstance() { - return defaultInstance; } - - public StopMasterRequest getDefaultInstanceForType() { - return defaultInstance; + private StopMasterRequest() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private StopMasterRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -25883,7 +27242,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -25894,34 +27253,18 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_StopMasterRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_StopMasterRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public StopMasterRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new StopMasterRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -25929,29 +27272,21 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final 
java.lang.Object obj) { if (obj == this) { return true; @@ -25962,12 +27297,10 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -25975,7 +27308,7 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -26003,46 +27336,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest 
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -26050,14 +27394,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.StopMasterRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.StopMasterRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_StopMasterRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_StopMasterRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -26070,27 +27415,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); 
return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_StopMasterRequest_descriptor; @@ -26114,6 +27452,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest)other); @@ -26125,7 +27489,8 @@ public final class MasterProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -26142,7 +27507,7 @@ public final class MasterProtos { parsedMessage 
= PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -26150,54 +27515,83 @@ public final class MasterProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.StopMasterRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.StopMasterRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest DEFAULT_INSTANCE; static { - defaultInstance = new StopMasterRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public StopMasterRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new StopMasterRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.StopMasterRequest) } - public interface StopMasterResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface StopMasterResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.StopMasterResponse) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.StopMasterResponse} */ - public static final class StopMasterResponse extends - com.google.protobuf.GeneratedMessage - implements StopMasterResponseOrBuilder { + public static final class StopMasterResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.StopMasterResponse) + StopMasterResponseOrBuilder { // Use StopMasterResponse.newBuilder() to construct. - private StopMasterResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private StopMasterResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private StopMasterResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final StopMasterResponse defaultInstance; - public static StopMasterResponse getDefaultInstance() { - return defaultInstance; } - - public StopMasterResponse getDefaultInstanceForType() { - return defaultInstance; + private StopMasterResponse() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private StopMasterResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - 
initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -26221,7 +27615,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -26232,34 +27626,18 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_StopMasterResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_StopMasterResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public StopMasterResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new StopMasterResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; 
memoizedIsInitialized = 1; return true; @@ -26267,29 +27645,21 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -26300,12 +27670,10 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -26313,7 +27681,7 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -26341,46 +27709,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + 
return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder 
newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -26388,14 +27767,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.StopMasterResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.StopMasterResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_StopMasterResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_StopMasterResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -26408,27 +27788,20 @@ public final class MasterProtos { } private Builder( - 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_StopMasterResponse_descriptor; @@ -26452,6 +27825,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse)other); @@ -26463,7 +27862,8 
@@ public final class MasterProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -26480,7 +27880,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -26488,54 +27888,83 @@ public final class MasterProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.StopMasterResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.StopMasterResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse DEFAULT_INSTANCE; static { - defaultInstance = new StopMasterResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public 
StopMasterResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new StopMasterResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.StopMasterResponse) } - public interface IsInMaintenanceModeRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface IsInMaintenanceModeRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.IsInMaintenanceModeRequest) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.IsInMaintenanceModeRequest} */ - public static final class IsInMaintenanceModeRequest extends - com.google.protobuf.GeneratedMessage - implements IsInMaintenanceModeRequestOrBuilder { + public static final class IsInMaintenanceModeRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.IsInMaintenanceModeRequest) + IsInMaintenanceModeRequestOrBuilder { // Use IsInMaintenanceModeRequest.newBuilder() to construct. 
- private IsInMaintenanceModeRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private IsInMaintenanceModeRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private IsInMaintenanceModeRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final IsInMaintenanceModeRequest defaultInstance; - public static IsInMaintenanceModeRequest getDefaultInstance() { - return defaultInstance; } - - public IsInMaintenanceModeRequest getDefaultInstanceForType() { - return defaultInstance; + private IsInMaintenanceModeRequest() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private IsInMaintenanceModeRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -26559,7 +27988,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -26570,34 +27999,18 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsInMaintenanceModeRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsInMaintenanceModeRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public IsInMaintenanceModeRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new IsInMaintenanceModeRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -26605,29 +28018,21 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - 
@java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -26638,12 +28043,10 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -26651,7 +28054,7 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -26679,46 +28082,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public 
static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -26726,14 +28140,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.IsInMaintenanceModeRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.IsInMaintenanceModeRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsInMaintenanceModeRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsInMaintenanceModeRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -26746,27 +28161,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new 
Builder(); - } - public Builder clear() { super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsInMaintenanceModeRequest_descriptor; @@ -26790,6 +28198,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest)other); @@ -26801,7 +28235,8 @@ public final class MasterProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); 
+ onChanged(); return this; } @@ -26818,7 +28253,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -26826,22 +28261,59 @@ public final class MasterProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.IsInMaintenanceModeRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.IsInMaintenanceModeRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest DEFAULT_INSTANCE; static { - defaultInstance = new IsInMaintenanceModeRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public IsInMaintenanceModeRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new IsInMaintenanceModeRequest(input, extensionRegistry); + } + }; + + public static 
com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.IsInMaintenanceModeRequest) } - public interface IsInMaintenanceModeResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface IsInMaintenanceModeResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.IsInMaintenanceModeResponse) + com.google.protobuf.MessageOrBuilder { - // required bool inMaintenanceMode = 1; /** * required bool inMaintenanceMode = 1; */ @@ -26854,36 +28326,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.IsInMaintenanceModeResponse} */ - public static final class IsInMaintenanceModeResponse extends - com.google.protobuf.GeneratedMessage - implements IsInMaintenanceModeResponseOrBuilder { + public static final class IsInMaintenanceModeResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.IsInMaintenanceModeResponse) + IsInMaintenanceModeResponseOrBuilder { // Use IsInMaintenanceModeResponse.newBuilder() to construct. 
- private IsInMaintenanceModeResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private IsInMaintenanceModeResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private IsInMaintenanceModeResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final IsInMaintenanceModeResponse defaultInstance; - public static IsInMaintenanceModeResponse getDefaultInstance() { - return defaultInstance; } - - public IsInMaintenanceModeResponse getDefaultInstanceForType() { - return defaultInstance; + private IsInMaintenanceModeResponse() { + inMaintenanceMode_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private IsInMaintenanceModeResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -26913,7 +28377,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -26924,30 +28388,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsInMaintenanceModeResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsInMaintenanceModeResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public IsInMaintenanceModeResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new IsInMaintenanceModeResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bool inMaintenanceMode = 1; public static final int INMAINTENANCEMODE_FIELD_NUMBER = 1; private boolean inMaintenanceMode_; /** @@ -26963,13 +28411,11 @@ public final class MasterProtos { return inMaintenanceMode_; } - private void initFields() { - inMaintenanceMode_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasInMaintenanceMode()) { memoizedIsInitialized = 0; @@ -26981,16 +28427,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, inMaintenanceMode_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = 
memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -26998,19 +28442,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, inMaintenanceMode_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -27026,12 +28464,10 @@ public final class MasterProtos { result = result && (getInMaintenanceMode() == other.getInMaintenanceMode()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -27041,9 +28477,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasInMaintenanceMode()) { hash = (37 * hash) + INMAINTENANCEMODE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getInMaintenanceMode()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getInMaintenanceMode()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -27071,46 +28508,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder 
newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -27118,14 +28566,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.IsInMaintenanceModeResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.IsInMaintenanceModeResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsInMaintenanceModeResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsInMaintenanceModeResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -27138,18 +28587,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent 
parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); inMaintenanceMode_ = false; @@ -27157,10 +28603,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsInMaintenanceModeResponse_descriptor; @@ -27191,6 +28633,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse) { return 
mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse)other); @@ -27205,13 +28673,13 @@ public final class MasterProtos { if (other.hasInMaintenanceMode()) { setInMaintenanceMode(other.getInMaintenanceMode()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasInMaintenanceMode()) { - return false; } return true; @@ -27226,7 +28694,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -27236,7 +28704,6 @@ public final class MasterProtos { } private int bitField0_; - // required bool inMaintenanceMode = 1; private boolean inMaintenanceMode_ ; /** * required bool inMaintenanceMode = 1; @@ -27268,22 +28735,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.IsInMaintenanceModeResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.IsInMaintenanceModeResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse DEFAULT_INSTANCE; static { - defaultInstance = new IsInMaintenanceModeResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public IsInMaintenanceModeResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new IsInMaintenanceModeResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.IsInMaintenanceModeResponse) } - public interface BalanceRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface BalanceRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.BalanceRequest) + com.google.protobuf.MessageOrBuilder { - // optional bool force = 1; /** * optional bool force = 1; */ @@ -27296,36 +28800,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.BalanceRequest} */ - public static final class BalanceRequest extends - com.google.protobuf.GeneratedMessage - implements BalanceRequestOrBuilder { + public static final class BalanceRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.BalanceRequest) + BalanceRequestOrBuilder { // Use BalanceRequest.newBuilder() to construct. 
- private BalanceRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private BalanceRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private BalanceRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final BalanceRequest defaultInstance; - public static BalanceRequest getDefaultInstance() { - return defaultInstance; } - - public BalanceRequest getDefaultInstanceForType() { - return defaultInstance; + private BalanceRequest() { + force_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private BalanceRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -27355,7 +28851,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -27366,30 +28862,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_BalanceRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_BalanceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public BalanceRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new BalanceRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional bool force = 1; public static final int FORCE_FIELD_NUMBER = 1; private boolean force_; /** @@ -27405,13 +28885,11 @@ public final class MasterProtos { return force_; } - private void initFields() { - force_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -27419,16 +28897,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, force_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -27436,19 +28912,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream 
.computeBoolSize(1, force_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -27464,12 +28934,10 @@ public final class MasterProtos { result = result && (getForce() == other.getForce()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -27479,9 +28947,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasForce()) { hash = (37 * hash) + FORCE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getForce()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getForce()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -27509,46 +28978,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -27556,14 +29036,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.BalanceRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.BalanceRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_BalanceRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_BalanceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -27576,18 +29057,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); force_ = false; @@ 
-27595,10 +29073,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_BalanceRequest_descriptor; @@ -27629,6 +29103,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest)other); @@ -27643,7 +29143,8 @@ public final class MasterProtos { if (other.hasForce()) { setForce(other.getForce()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -27660,7 +29161,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = 
(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -27670,7 +29171,6 @@ public final class MasterProtos { } private int bitField0_; - // optional bool force = 1; private boolean force_ ; /** * optional bool force = 1; @@ -27702,22 +29202,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.BalanceRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.BalanceRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest DEFAULT_INSTANCE; static { - defaultInstance = new BalanceRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public BalanceRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new BalanceRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.BalanceRequest) } - public interface BalanceResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface BalanceResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.BalanceResponse) + com.google.protobuf.MessageOrBuilder { - // required bool balancer_ran = 1; /** * required bool balancer_ran = 1; */ @@ -27730,36 +29267,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.BalanceResponse} */ - public static final class BalanceResponse extends - com.google.protobuf.GeneratedMessage - implements BalanceResponseOrBuilder { + public static final class BalanceResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.BalanceResponse) + BalanceResponseOrBuilder { // Use BalanceResponse.newBuilder() to construct. 
- private BalanceResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private BalanceResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private BalanceResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final BalanceResponse defaultInstance; - public static BalanceResponse getDefaultInstance() { - return defaultInstance; - } - - public BalanceResponse getDefaultInstanceForType() { - return defaultInstance; + private BalanceResponse() { + balancerRan_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private BalanceResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -27789,7 +29318,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -27800,30 +29329,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_BalanceResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_BalanceResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public BalanceResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new BalanceResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bool balancer_ran = 1; public static final int BALANCER_RAN_FIELD_NUMBER = 1; private boolean balancerRan_; /** @@ -27839,13 +29352,11 @@ public final class MasterProtos { return balancerRan_; } - private void initFields() { - balancerRan_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasBalancerRan()) { memoizedIsInitialized = 0; @@ -27857,16 +29368,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, balancerRan_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -27874,19 +29383,13 @@ public final class MasterProtos { 
size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, balancerRan_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -27902,12 +29405,10 @@ public final class MasterProtos { result = result && (getBalancerRan() == other.getBalancerRan()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -27917,9 +29418,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasBalancerRan()) { hash = (37 * hash) + BALANCER_RAN_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getBalancerRan()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getBalancerRan()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -27947,46 +29449,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return 
PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse prototype) { - return newBuilder().mergeFrom(prototype); + return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -27994,14 +29507,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.BalanceResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.BalanceResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_BalanceResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_BalanceResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -28014,18 +29528,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } 
} - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); balancerRan_ = false; @@ -28033,10 +29544,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_BalanceResponse_descriptor; @@ -28067,6 +29574,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse)other); @@ -28081,13 +29614,13 @@ public final class MasterProtos { if (other.hasBalancerRan()) { setBalancerRan(other.getBalancerRan()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if 
(!hasBalancerRan()) { - return false; } return true; @@ -28102,7 +29635,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -28112,7 +29645,6 @@ public final class MasterProtos { } private int bitField0_; - // required bool balancer_ran = 1; private boolean balancerRan_ ; /** * required bool balancer_ran = 1; @@ -28144,22 +29676,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.BalanceResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.BalanceResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse DEFAULT_INSTANCE; static { - defaultInstance = new BalanceResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public BalanceResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return new BalanceResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.BalanceResponse) } - public interface SetBalancerRunningRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface SetBalancerRunningRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SetBalancerRunningRequest) + com.google.protobuf.MessageOrBuilder { - // required bool on = 1; /** * required bool on = 1; */ @@ -28169,7 +29738,6 @@ public final class MasterProtos { */ boolean getOn(); - // optional bool synchronous = 2; /** * optional bool synchronous = 2; */ @@ -28182,36 +29750,29 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.SetBalancerRunningRequest} */ - public static final class SetBalancerRunningRequest extends - com.google.protobuf.GeneratedMessage - implements SetBalancerRunningRequestOrBuilder { + public static final class SetBalancerRunningRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.SetBalancerRunningRequest) + SetBalancerRunningRequestOrBuilder { // Use SetBalancerRunningRequest.newBuilder() to construct. 
- private SetBalancerRunningRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private SetBalancerRunningRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private SetBalancerRunningRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SetBalancerRunningRequest defaultInstance; - public static SetBalancerRunningRequest getDefaultInstance() { - return defaultInstance; - } - - public SetBalancerRunningRequest getDefaultInstanceForType() { - return defaultInstance; + private SetBalancerRunningRequest() { + on_ = false; + synchronous_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private SetBalancerRunningRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -28246,7 +29807,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -28257,30 +29818,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetBalancerRunningRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetBalancerRunningRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SetBalancerRunningRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SetBalancerRunningRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bool on = 1; public static final int ON_FIELD_NUMBER = 1; private boolean on_; /** @@ -28296,7 +29841,6 @@ public final class MasterProtos { return on_; } - // optional bool synchronous = 2; public static final int SYNCHRONOUS_FIELD_NUMBER = 2; private boolean synchronous_; /** @@ -28312,14 +29856,11 @@ public final class MasterProtos { return synchronous_; } - private void initFields() { - on_ = false; - synchronous_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasOn()) { memoizedIsInitialized = 0; @@ -28331,19 +29872,17 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, on_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { 
output.writeBool(2, synchronous_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -28355,19 +29894,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(2, synchronous_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -28388,12 +29921,10 @@ public final class MasterProtos { result = result && (getSynchronous() == other.getSynchronous()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -28403,13 +29934,15 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasOn()) { hash = (37 * hash) + ON_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getOn()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getOn()); } if (hasSynchronous()) { hash = (37 * hash) + SYNCHRONOUS_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getSynchronous()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getSynchronous()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -28437,46 +29970,57 @@ public final class MasterProtos { } 
public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return 
PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -28484,14 +30028,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.SetBalancerRunningRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SetBalancerRunningRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetBalancerRunningRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetBalancerRunningRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -28504,18 +30049,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); on_ = false; @@ -28525,10 +30067,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetBalancerRunningRequest_descriptor; @@ -28563,6 +30101,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) 
super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest)other); @@ -28580,13 +30144,13 @@ public final class MasterProtos { if (other.hasSynchronous()) { setSynchronous(other.getSynchronous()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasOn()) { - return false; } return true; @@ -28601,7 +30165,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -28611,7 +30175,6 @@ public final class MasterProtos { } private int bitField0_; - // required bool on = 1; private boolean on_ ; /** * required bool on = 1; @@ -28644,7 +30207,6 @@ public final class MasterProtos { return this; } - // optional bool synchronous = 2; private boolean synchronous_ ; /** * optional bool synchronous = 2; @@ -28676,22 +30238,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.SetBalancerRunningRequest) } + // 
@@protoc_insertion_point(class_scope:hbase.pb.SetBalancerRunningRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest DEFAULT_INSTANCE; static { - defaultInstance = new SetBalancerRunningRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public SetBalancerRunningRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SetBalancerRunningRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.SetBalancerRunningRequest) } - public interface SetBalancerRunningResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface SetBalancerRunningResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SetBalancerRunningResponse) + com.google.protobuf.MessageOrBuilder { - // optional bool prev_balance_value = 1; /** * optional bool prev_balance_value = 1; */ @@ -28704,36 +30303,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.SetBalancerRunningResponse} */ - public static final class SetBalancerRunningResponse extends 
- com.google.protobuf.GeneratedMessage - implements SetBalancerRunningResponseOrBuilder { + public static final class SetBalancerRunningResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.SetBalancerRunningResponse) + SetBalancerRunningResponseOrBuilder { // Use SetBalancerRunningResponse.newBuilder() to construct. - private SetBalancerRunningResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private SetBalancerRunningResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private SetBalancerRunningResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SetBalancerRunningResponse defaultInstance; - public static SetBalancerRunningResponse getDefaultInstance() { - return defaultInstance; } - - public SetBalancerRunningResponse getDefaultInstanceForType() { - return defaultInstance; + private SetBalancerRunningResponse() { + prevBalanceValue_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private SetBalancerRunningResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -28763,7 +30354,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = 
unknownFields.build(); makeExtensionsImmutable(); @@ -28774,30 +30365,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetBalancerRunningResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetBalancerRunningResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SetBalancerRunningResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SetBalancerRunningResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional bool prev_balance_value = 1; public static final int PREV_BALANCE_VALUE_FIELD_NUMBER = 1; private boolean prevBalanceValue_; /** @@ -28813,13 +30388,11 @@ public final class MasterProtos { return prevBalanceValue_; } - private void initFields() { - prevBalanceValue_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -28827,16 +30400,14 @@ public final class MasterProtos { public void 
writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, prevBalanceValue_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -28844,19 +30415,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, prevBalanceValue_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -28872,12 +30437,10 @@ public final class MasterProtos { result = result && (getPrevBalanceValue() == other.getPrevBalanceValue()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -28887,9 +30450,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasPrevBalanceValue()) { hash = (37 * hash) + PREV_BALANCE_VALUE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getPrevBalanceValue()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getPrevBalanceValue()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -28917,46 +30481,57 @@ public final 
class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -28964,14 +30539,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.SetBalancerRunningResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SetBalancerRunningResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetBalancerRunningResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetBalancerRunningResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -28984,18 +30560,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); prevBalanceValue_ = false; @@ -29003,10 +30576,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetBalancerRunningResponse_descriptor; @@ -29037,6 +30606,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor 
field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse)other); @@ -29051,7 +30646,8 @@ public final class MasterProtos { if (other.hasPrevBalanceValue()) { setPrevBalanceValue(other.getPrevBalanceValue()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -29068,7 +30664,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -29078,7 +30674,6 @@ public final class MasterProtos { } private int bitField0_; - // optional bool prev_balance_value = 1; private boolean prevBalanceValue_ ; /** * optional bool prev_balance_value = 1; @@ -29110,54 +30705,83 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.SetBalancerRunningResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.SetBalancerRunningResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse DEFAULT_INSTANCE; static { - 
defaultInstance = new SetBalancerRunningResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public SetBalancerRunningResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SetBalancerRunningResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.SetBalancerRunningResponse) } - public interface IsBalancerEnabledRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface IsBalancerEnabledRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.IsBalancerEnabledRequest) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.IsBalancerEnabledRequest} */ - public static final class IsBalancerEnabledRequest extends - com.google.protobuf.GeneratedMessage - implements IsBalancerEnabledRequestOrBuilder { + public static final class IsBalancerEnabledRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.IsBalancerEnabledRequest) + IsBalancerEnabledRequestOrBuilder { // Use 
IsBalancerEnabledRequest.newBuilder() to construct. - private IsBalancerEnabledRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private IsBalancerEnabledRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private IsBalancerEnabledRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final IsBalancerEnabledRequest defaultInstance; - public static IsBalancerEnabledRequest getDefaultInstance() { - return defaultInstance; } - - public IsBalancerEnabledRequest getDefaultInstanceForType() { - return defaultInstance; + private IsBalancerEnabledRequest() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private IsBalancerEnabledRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -29181,7 +30805,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -29192,34 +30816,18 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsBalancerEnabledRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsBalancerEnabledRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public IsBalancerEnabledRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new IsBalancerEnabledRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -29227,29 +30835,21 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return 
super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -29260,12 +30860,10 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -29273,7 +30871,7 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -29301,46 +30899,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + 
.parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -29348,14 +30957,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.IsBalancerEnabledRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.IsBalancerEnabledRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsBalancerEnabledRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsBalancerEnabledRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -29368,27 +30978,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - 
public Builder clear() { super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsBalancerEnabledRequest_descriptor; @@ -29412,6 +31015,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest)other); @@ -29423,7 +31052,8 @@ public final class MasterProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return 
this; } @@ -29440,7 +31070,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -29448,22 +31078,59 @@ public final class MasterProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.IsBalancerEnabledRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.IsBalancerEnabledRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest DEFAULT_INSTANCE; static { - defaultInstance = new IsBalancerEnabledRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public IsBalancerEnabledRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new IsBalancerEnabledRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + 
@java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.IsBalancerEnabledRequest) } - public interface IsBalancerEnabledResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface IsBalancerEnabledResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.IsBalancerEnabledResponse) + com.google.protobuf.MessageOrBuilder { - // required bool enabled = 1; /** * required bool enabled = 1; */ @@ -29476,36 +31143,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.IsBalancerEnabledResponse} */ - public static final class IsBalancerEnabledResponse extends - com.google.protobuf.GeneratedMessage - implements IsBalancerEnabledResponseOrBuilder { + public static final class IsBalancerEnabledResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.IsBalancerEnabledResponse) + IsBalancerEnabledResponseOrBuilder { // Use IsBalancerEnabledResponse.newBuilder() to construct. 
- private IsBalancerEnabledResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private IsBalancerEnabledResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private IsBalancerEnabledResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final IsBalancerEnabledResponse defaultInstance; - public static IsBalancerEnabledResponse getDefaultInstance() { - return defaultInstance; } - - public IsBalancerEnabledResponse getDefaultInstanceForType() { - return defaultInstance; + private IsBalancerEnabledResponse() { + enabled_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private IsBalancerEnabledResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -29535,7 +31194,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -29546,30 +31205,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsBalancerEnabledResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() 
{ return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsBalancerEnabledResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public IsBalancerEnabledResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new IsBalancerEnabledResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bool enabled = 1; public static final int ENABLED_FIELD_NUMBER = 1; private boolean enabled_; /** @@ -29585,13 +31228,11 @@ public final class MasterProtos { return enabled_; } - private void initFields() { - enabled_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasEnabled()) { memoizedIsInitialized = 0; @@ -29603,16 +31244,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, enabled_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -29620,19 +31259,13 @@ public 
final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, enabled_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -29648,12 +31281,10 @@ public final class MasterProtos { result = result && (getEnabled() == other.getEnabled()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -29663,9 +31294,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasEnabled()) { hash = (37 * hash) + ENABLED_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getEnabled()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getEnabled()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -29693,46 +31325,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - 
return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse prototype) { - return 
newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -29740,14 +31383,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.IsBalancerEnabledResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.IsBalancerEnabledResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsBalancerEnabledResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsBalancerEnabledResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -29760,18 +31404,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if 
(com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); enabled_ = false; @@ -29779,10 +31420,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsBalancerEnabledResponse_descriptor; @@ -29813,6 +31450,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse)other); @@ -29827,13 +31490,13 @@ public final class MasterProtos { if (other.hasEnabled()) { setEnabled(other.getEnabled()); } - 
this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasEnabled()) { - return false; } return true; @@ -29848,7 +31511,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -29858,7 +31521,6 @@ public final class MasterProtos { } private int bitField0_; - // required bool enabled = 1; private boolean enabled_ ; /** * required bool enabled = 1; @@ -29890,22 +31552,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.IsBalancerEnabledResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.IsBalancerEnabledResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse DEFAULT_INSTANCE; static { - defaultInstance = new IsBalancerEnabledResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new 
com.google.protobuf.AbstractParser() { + public IsBalancerEnabledResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new IsBalancerEnabledResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.IsBalancerEnabledResponse) } - public interface SetSplitOrMergeEnabledRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface SetSplitOrMergeEnabledRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SetSplitOrMergeEnabledRequest) + com.google.protobuf.MessageOrBuilder { - // required bool enabled = 1; /** * required bool enabled = 1; */ @@ -29915,7 +31614,6 @@ public final class MasterProtos { */ boolean getEnabled(); - // optional bool synchronous = 2; /** * optional bool synchronous = 2; */ @@ -29925,7 +31623,6 @@ public final class MasterProtos { */ boolean getSynchronous(); - // repeated .hbase.pb.MasterSwitchType switch_types = 3; /** * repeated .hbase.pb.MasterSwitchType switch_types = 3; */ @@ -29942,36 +31639,30 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.SetSplitOrMergeEnabledRequest} */ - public static final class SetSplitOrMergeEnabledRequest extends - com.google.protobuf.GeneratedMessage - implements SetSplitOrMergeEnabledRequestOrBuilder { + public static final class SetSplitOrMergeEnabledRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.SetSplitOrMergeEnabledRequest) + 
SetSplitOrMergeEnabledRequestOrBuilder { // Use SetSplitOrMergeEnabledRequest.newBuilder() to construct. - private SetSplitOrMergeEnabledRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private SetSplitOrMergeEnabledRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private SetSplitOrMergeEnabledRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SetSplitOrMergeEnabledRequest defaultInstance; - public static SetSplitOrMergeEnabledRequest getDefaultInstance() { - return defaultInstance; - } - - public SetSplitOrMergeEnabledRequest getDefaultInstanceForType() { - return defaultInstance; + private SetSplitOrMergeEnabledRequest() { + enabled_ = false; + synchronous_ = false; + switchTypes_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private SetSplitOrMergeEnabledRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -30007,10 +31698,10 @@ public final class MasterProtos { unknownFields.mergeVarintField(3, rawValue); } else { if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { - switchTypes_ = new java.util.ArrayList(); + switchTypes_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000004; } - switchTypes_.add(value); + switchTypes_.add(rawValue); } break; } @@ -30024,10 +31715,10 @@ public final class MasterProtos { unknownFields.mergeVarintField(3, rawValue); } else { if (!((mutable_bitField0_ 
& 0x00000004) == 0x00000004)) { - switchTypes_ = new java.util.ArrayList(); + switchTypes_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000004; } - switchTypes_.add(value); + switchTypes_.add(rawValue); } } input.popLimit(oldLimit); @@ -30039,7 +31730,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { switchTypes_ = java.util.Collections.unmodifiableList(switchTypes_); @@ -30053,30 +31744,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSplitOrMergeEnabledRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSplitOrMergeEnabledRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SetSplitOrMergeEnabledRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SetSplitOrMergeEnabledRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bool enabled = 1; public static final int 
ENABLED_FIELD_NUMBER = 1; private boolean enabled_; /** @@ -30092,7 +31767,6 @@ public final class MasterProtos { return enabled_; } - // optional bool synchronous = 2; public static final int SYNCHRONOUS_FIELD_NUMBER = 2; private boolean synchronous_; /** @@ -30108,14 +31782,23 @@ public final class MasterProtos { return synchronous_; } - // repeated .hbase.pb.MasterSwitchType switch_types = 3; public static final int SWITCH_TYPES_FIELD_NUMBER = 3; - private java.util.List switchTypes_; + private java.util.List switchTypes_; + private static final com.google.protobuf.Internal.ListAdapter.Converter< + java.lang.Integer, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterSwitchType> switchTypes_converter_ = + new com.google.protobuf.Internal.ListAdapter.Converter< + java.lang.Integer, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterSwitchType>() { + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterSwitchType convert(java.lang.Integer from) { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterSwitchType result = org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterSwitchType.valueOf(from); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterSwitchType.SPLIT : result; + } + }; /** * repeated .hbase.pb.MasterSwitchType switch_types = 3; */ public java.util.List getSwitchTypesList() { - return switchTypes_; + return new com.google.protobuf.Internal.ListAdapter< + java.lang.Integer, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterSwitchType>(switchTypes_, switchTypes_converter_); } /** * repeated .hbase.pb.MasterSwitchType switch_types = 3; @@ -30127,18 +31810,14 @@ public final class MasterProtos { * repeated .hbase.pb.MasterSwitchType switch_types = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterSwitchType getSwitchTypes(int index) { - return switchTypes_.get(index); + return switchTypes_converter_.convert(switchTypes_.get(index)); } - private void initFields() { - enabled_ = false; - synchronous_ = false; - switchTypes_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasEnabled()) { memoizedIsInitialized = 0; @@ -30150,7 +31829,6 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, enabled_); } @@ -30158,14 +31836,13 @@ public final class MasterProtos { output.writeBool(2, synchronous_); } for (int i = 0; i < switchTypes_.size(); i++) { - output.writeEnum(3, switchTypes_.get(i).getNumber()); + output.writeEnum(3, switchTypes_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) 
return size; size = 0; @@ -30181,24 +31858,18 @@ public final class MasterProtos { int dataSize = 0; for (int i = 0; i < switchTypes_.size(); i++) { dataSize += com.google.protobuf.CodedOutputStream - .computeEnumSizeNoTag(switchTypes_.get(i).getNumber()); + .computeEnumSizeNoTag(switchTypes_.get(i)); } size += dataSize; size += 1 * switchTypes_.size(); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -30219,14 +31890,11 @@ public final class MasterProtos { result = result && (getSynchronous() == other.getSynchronous()); } - result = result && getSwitchTypesList() - .equals(other.getSwitchTypesList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && switchTypes_.equals(other.switchTypes_); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -30236,17 +31904,19 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasEnabled()) { hash = (37 * hash) + ENABLED_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getEnabled()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getEnabled()); } if (hasSynchronous()) { hash = (37 * hash) + SYNCHRONOUS_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getSynchronous()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getSynchronous()); } if (getSwitchTypesCount() > 0) { hash = (37 * hash) + SWITCH_TYPES_FIELD_NUMBER; - hash = (53 * hash) + 
hashEnumList(getSwitchTypesList()); + hash = (53 * hash) + switchTypes_.hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -30274,46 +31944,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -30321,14 +32002,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.SetSplitOrMergeEnabledRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SetSplitOrMergeEnabledRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSplitOrMergeEnabledRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSplitOrMergeEnabledRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -30341,18 +32023,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - 
return new Builder(); - } - public Builder clear() { super.clear(); enabled_ = false; @@ -30364,10 +32043,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSplitOrMergeEnabledRequest_descriptor; @@ -30407,6 +32082,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest)other); @@ -30434,13 +32135,13 @@ public final class MasterProtos { } onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasEnabled()) { - return false; } return true; @@ -30455,7 +32156,7 @@ 
public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -30465,7 +32166,6 @@ public final class MasterProtos { } private int bitField0_; - // required bool enabled = 1; private boolean enabled_ ; /** * required bool enabled = 1; @@ -30498,7 +32198,6 @@ public final class MasterProtos { return this; } - // optional bool synchronous = 2; private boolean synchronous_ ; /** * optional bool synchronous = 2; @@ -30531,12 +32230,11 @@ public final class MasterProtos { return this; } - // repeated .hbase.pb.MasterSwitchType switch_types = 3; - private java.util.List switchTypes_ = + private java.util.List switchTypes_ = java.util.Collections.emptyList(); private void ensureSwitchTypesIsMutable() { if (!((bitField0_ & 0x00000004) == 0x00000004)) { - switchTypes_ = new java.util.ArrayList(switchTypes_); + switchTypes_ = new java.util.ArrayList(switchTypes_); bitField0_ |= 0x00000004; } } @@ -30544,7 +32242,8 @@ public final class MasterProtos { * repeated .hbase.pb.MasterSwitchType switch_types = 3; */ public java.util.List getSwitchTypesList() { - return java.util.Collections.unmodifiableList(switchTypes_); + return new com.google.protobuf.Internal.ListAdapter< + java.lang.Integer, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterSwitchType>(switchTypes_, switchTypes_converter_); } /** * repeated .hbase.pb.MasterSwitchType switch_types = 3; @@ -30556,7 +32255,7 @@ public final class MasterProtos { * repeated .hbase.pb.MasterSwitchType switch_types = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterSwitchType getSwitchTypes(int index) { - return switchTypes_.get(index); 
+ return switchTypes_converter_.convert(switchTypes_.get(index)); } /** * repeated .hbase.pb.MasterSwitchType switch_types = 3; @@ -30567,7 +32266,7 @@ public final class MasterProtos { throw new NullPointerException(); } ensureSwitchTypesIsMutable(); - switchTypes_.set(index, value); + switchTypes_.set(index, value.getNumber()); onChanged(); return this; } @@ -30579,7 +32278,7 @@ public final class MasterProtos { throw new NullPointerException(); } ensureSwitchTypesIsMutable(); - switchTypes_.add(value); + switchTypes_.add(value.getNumber()); onChanged(); return this; } @@ -30589,7 +32288,9 @@ public final class MasterProtos { public Builder addAllSwitchTypes( java.lang.Iterable values) { ensureSwitchTypesIsMutable(); - super.addAll(values, switchTypes_); + for (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterSwitchType value : values) { + switchTypes_.add(value.getNumber()); + } onChanged(); return this; } @@ -30602,22 +32303,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.SetSplitOrMergeEnabledRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.SetSplitOrMergeEnabledRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest DEFAULT_INSTANCE; static { - defaultInstance = new SetSplitOrMergeEnabledRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest 
getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public SetSplitOrMergeEnabledRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SetSplitOrMergeEnabledRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.SetSplitOrMergeEnabledRequest) } - public interface SetSplitOrMergeEnabledResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface SetSplitOrMergeEnabledResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SetSplitOrMergeEnabledResponse) + com.google.protobuf.MessageOrBuilder { - // repeated bool prev_value = 1; /** * repeated bool prev_value = 1; */ @@ -30634,36 +32372,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.SetSplitOrMergeEnabledResponse} */ - public static final class SetSplitOrMergeEnabledResponse extends - com.google.protobuf.GeneratedMessage - implements SetSplitOrMergeEnabledResponseOrBuilder { + public static final class SetSplitOrMergeEnabledResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.SetSplitOrMergeEnabledResponse) + SetSplitOrMergeEnabledResponseOrBuilder { // Use SetSplitOrMergeEnabledResponse.newBuilder() to construct. 
- private SetSplitOrMergeEnabledResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private SetSplitOrMergeEnabledResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private SetSplitOrMergeEnabledResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SetSplitOrMergeEnabledResponse defaultInstance; - public static SetSplitOrMergeEnabledResponse getDefaultInstance() { - return defaultInstance; } - - public SetSplitOrMergeEnabledResponse getDefaultInstanceForType() { - return defaultInstance; + private SetSplitOrMergeEnabledResponse() { + prevValue_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private SetSplitOrMergeEnabledResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -30709,7 +32439,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { prevValue_ = java.util.Collections.unmodifiableList(prevValue_); @@ -30723,29 +32453,13 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSplitOrMergeEnabledResponse_descriptor; } - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSplitOrMergeEnabledResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SetSplitOrMergeEnabledResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SetSplitOrMergeEnabledResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - // repeated bool prev_value = 1; public static final int PREV_VALUE_FIELD_NUMBER = 1; private java.util.List prevValue_; /** @@ -30768,13 +32482,11 @@ public final class MasterProtos { return prevValue_.get(index); } - private void initFields() { - prevValue_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -30782,16 +32494,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < prevValue_.size(); i++) { output.writeBool(1, prevValue_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); 
} - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -30801,19 +32511,13 @@ public final class MasterProtos { size += dataSize; size += 1 * getPrevValueList().size(); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -30826,12 +32530,10 @@ public final class MasterProtos { boolean result = true; result = result && getPrevValueList() .equals(other.getPrevValueList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -30843,7 +32545,7 @@ public final class MasterProtos { hash = (37 * hash) + PREV_VALUE_FIELD_NUMBER; hash = (53 * hash) + getPrevValueList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -30871,46 +32573,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse parseFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder 
newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -30918,14 +32631,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.SetSplitOrMergeEnabledResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SetSplitOrMergeEnabledResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSplitOrMergeEnabledResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSplitOrMergeEnabledResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -30938,18 +32652,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + 
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); prevValue_ = java.util.Collections.emptyList(); @@ -30957,10 +32668,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSplitOrMergeEnabledResponse_descriptor; @@ -30990,6 +32697,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse) { return 
mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse)other); @@ -31011,7 +32744,8 @@ public final class MasterProtos { } onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -31028,7 +32762,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -31038,7 +32772,6 @@ public final class MasterProtos { } private int bitField0_; - // repeated bool prev_value = 1; private java.util.List prevValue_ = java.util.Collections.emptyList(); private void ensurePrevValueIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { @@ -31090,7 +32823,8 @@ public final class MasterProtos { public Builder addAllPrevValue( java.lang.Iterable values) { ensurePrevValueIsMutable(); - super.addAll(values, prevValue_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, prevValue_); onChanged(); return this; } @@ -31103,22 +32837,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.SetSplitOrMergeEnabledResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.SetSplitOrMergeEnabledResponse) + private static final 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse DEFAULT_INSTANCE; static { - defaultInstance = new SetSplitOrMergeEnabledResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public SetSplitOrMergeEnabledResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SetSplitOrMergeEnabledResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.SetSplitOrMergeEnabledResponse) } - public interface IsSplitOrMergeEnabledRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface IsSplitOrMergeEnabledRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.IsSplitOrMergeEnabledRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.MasterSwitchType switch_type = 1; /** * required .hbase.pb.MasterSwitchType switch_type = 1; */ @@ -31131,36 +32902,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.IsSplitOrMergeEnabledRequest} */ - public static final class IsSplitOrMergeEnabledRequest extends - 
com.google.protobuf.GeneratedMessage - implements IsSplitOrMergeEnabledRequestOrBuilder { + public static final class IsSplitOrMergeEnabledRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.IsSplitOrMergeEnabledRequest) + IsSplitOrMergeEnabledRequestOrBuilder { // Use IsSplitOrMergeEnabledRequest.newBuilder() to construct. - private IsSplitOrMergeEnabledRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private IsSplitOrMergeEnabledRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private IsSplitOrMergeEnabledRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final IsSplitOrMergeEnabledRequest defaultInstance; - public static IsSplitOrMergeEnabledRequest getDefaultInstance() { - return defaultInstance; - } - - public IsSplitOrMergeEnabledRequest getDefaultInstanceForType() { - return defaultInstance; + private IsSplitOrMergeEnabledRequest() { + switchType_ = 0; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private IsSplitOrMergeEnabledRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -31186,7 +32949,7 @@ public final class MasterProtos { unknownFields.mergeVarintField(1, rawValue); } else { bitField0_ |= 0x00000001; - switchType_ = value; + switchType_ = rawValue; } break; } @@ -31196,7 +32959,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } 
catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -31207,32 +32970,16 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSplitOrMergeEnabledRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSplitOrMergeEnabledRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public IsSplitOrMergeEnabledRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new IsSplitOrMergeEnabledRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.MasterSwitchType switch_type = 1; public static final int SWITCH_TYPE_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterSwitchType switchType_; + private int switchType_; /** * required .hbase.pb.MasterSwitchType switch_type = 1; */ @@ -31243,16 +32990,15 @@ public final class MasterProtos { * required .hbase.pb.MasterSwitchType switch_type = 1; */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterSwitchType getSwitchType() { - return switchType_; + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterSwitchType result = org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterSwitchType.valueOf(switchType_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterSwitchType.SPLIT : result; } - private void initFields() { - switchType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterSwitchType.SPLIT; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasSwitchType()) { memoizedIsInitialized = 0; @@ -31264,36 +33010,28 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeEnum(1, switchType_.getNumber()); + output.writeEnum(1, switchType_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(1, switchType_.getNumber()); + .computeEnumSize(1, switchType_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - 
@java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -31306,15 +33044,12 @@ public final class MasterProtos { boolean result = true; result = result && (hasSwitchType() == other.hasSwitchType()); if (hasSwitchType()) { - result = result && - (getSwitchType() == other.getSwitchType()); + result = result && switchType_ == other.switchType_; } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -31324,9 +33059,9 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasSwitchType()) { hash = (37 * hash) + SWITCH_TYPE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getSwitchType()); + hash = (53 * hash) + switchType_; } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -31354,46 +33089,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest parseDelimitedFrom(java.io.InputStream input) throws 
java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -31401,14 +33147,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.IsSplitOrMergeEnabledRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.IsSplitOrMergeEnabledRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSplitOrMergeEnabledRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSplitOrMergeEnabledRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -31421,29 +33168,22 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return 
new Builder(); - } - public Builder clear() { super.clear(); - switchType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterSwitchType.SPLIT; + switchType_ = 0; bitField0_ = (bitField0_ & ~0x00000001); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSplitOrMergeEnabledRequest_descriptor; @@ -31474,6 +33214,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest)other); @@ -31488,13 +33254,13 @@ public final class MasterProtos { if (other.hasSwitchType()) { setSwitchType(other.getSwitchType()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); 
return this; } public final boolean isInitialized() { if (!hasSwitchType()) { - return false; } return true; @@ -31509,7 +33275,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -31519,8 +33285,7 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.MasterSwitchType switch_type = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterSwitchType switchType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterSwitchType.SPLIT; + private int switchType_ = 0; /** * required .hbase.pb.MasterSwitchType switch_type = 1; */ @@ -31531,7 +33296,8 @@ public final class MasterProtos { * required .hbase.pb.MasterSwitchType switch_type = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterSwitchType getSwitchType() { - return switchType_; + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterSwitchType result = org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterSwitchType.valueOf(switchType_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterSwitchType.SPLIT : result; } /** * required .hbase.pb.MasterSwitchType switch_type = 1; @@ -31541,7 +33307,7 @@ public final class MasterProtos { throw new NullPointerException(); } bitField0_ |= 0x00000001; - switchType_ = value; + switchType_ = value.getNumber(); onChanged(); return this; } @@ -31550,26 +33316,63 @@ public final class MasterProtos { */ public Builder clearSwitchType() { bitField0_ = (bitField0_ & ~0x00000001); - switchType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterSwitchType.SPLIT; + switchType_ = 0; onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.IsSplitOrMergeEnabledRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.IsSplitOrMergeEnabledRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest DEFAULT_INSTANCE; static { - defaultInstance = new IsSplitOrMergeEnabledRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public IsSplitOrMergeEnabledRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return new IsSplitOrMergeEnabledRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.IsSplitOrMergeEnabledRequest) } - public interface IsSplitOrMergeEnabledResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface IsSplitOrMergeEnabledResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.IsSplitOrMergeEnabledResponse) + com.google.protobuf.MessageOrBuilder { - // required bool enabled = 1; /** * required bool enabled = 1; */ @@ -31582,36 +33385,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.IsSplitOrMergeEnabledResponse} */ - public static final class IsSplitOrMergeEnabledResponse extends - com.google.protobuf.GeneratedMessage - implements IsSplitOrMergeEnabledResponseOrBuilder { + public static final class IsSplitOrMergeEnabledResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.IsSplitOrMergeEnabledResponse) + IsSplitOrMergeEnabledResponseOrBuilder { // Use IsSplitOrMergeEnabledResponse.newBuilder() to construct. 
- private IsSplitOrMergeEnabledResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private IsSplitOrMergeEnabledResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private IsSplitOrMergeEnabledResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final IsSplitOrMergeEnabledResponse defaultInstance; - public static IsSplitOrMergeEnabledResponse getDefaultInstance() { - return defaultInstance; } - - public IsSplitOrMergeEnabledResponse getDefaultInstanceForType() { - return defaultInstance; + private IsSplitOrMergeEnabledResponse() { + enabled_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private IsSplitOrMergeEnabledResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -31641,7 +33436,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -31652,30 +33447,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSplitOrMergeEnabledResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSplitOrMergeEnabledResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public IsSplitOrMergeEnabledResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new IsSplitOrMergeEnabledResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bool enabled = 1; public static final int ENABLED_FIELD_NUMBER = 1; private boolean enabled_; /** @@ -31691,13 +33470,11 @@ public final class MasterProtos { return enabled_; } - private void initFields() { - enabled_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasEnabled()) { memoizedIsInitialized = 0; @@ -31709,16 +33486,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, enabled_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = 
memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -31726,19 +33501,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, enabled_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -31754,12 +33523,10 @@ public final class MasterProtos { result = result && (getEnabled() == other.getEnabled()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -31769,9 +33536,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasEnabled()) { hash = (37 * hash) + ENABLED_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getEnabled()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getEnabled()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -31799,46 +33567,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse 
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder 
newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -31846,14 +33625,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.IsSplitOrMergeEnabledResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.IsSplitOrMergeEnabledResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSplitOrMergeEnabledResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSplitOrMergeEnabledResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -31866,18 +33646,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent 
parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); enabled_ = false; @@ -31885,10 +33662,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSplitOrMergeEnabledResponse_descriptor; @@ -31919,6 +33692,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse)other); @@ -31933,13 +33732,13 @@ 
public final class MasterProtos { if (other.hasEnabled()) { setEnabled(other.getEnabled()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasEnabled()) { - return false; } return true; @@ -31954,7 +33753,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -31964,7 +33763,6 @@ public final class MasterProtos { } private int bitField0_; - // required bool enabled = 1; private boolean enabled_ ; /** * required bool enabled = 1; @@ -31996,54 +33794,83 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.IsSplitOrMergeEnabledResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.IsSplitOrMergeEnabledResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse DEFAULT_INSTANCE; static { - defaultInstance = new IsSplitOrMergeEnabledResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse getDefaultInstance() { + 
return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public IsSplitOrMergeEnabledResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new IsSplitOrMergeEnabledResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.IsSplitOrMergeEnabledResponse) } - public interface NormalizeRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface NormalizeRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.NormalizeRequest) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.NormalizeRequest} */ - public static final class NormalizeRequest extends - com.google.protobuf.GeneratedMessage - implements NormalizeRequestOrBuilder { + public static final class NormalizeRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.NormalizeRequest) + NormalizeRequestOrBuilder { // Use NormalizeRequest.newBuilder() to construct. 
- private NormalizeRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private NormalizeRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private NormalizeRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final NormalizeRequest defaultInstance; - public static NormalizeRequest getDefaultInstance() { - return defaultInstance; - } - - public NormalizeRequest getDefaultInstanceForType() { - return defaultInstance; + private NormalizeRequest() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private NormalizeRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -32067,7 +33894,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -32078,34 +33905,18 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public NormalizeRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new NormalizeRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -32113,29 +33924,21 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final 
java.lang.Object obj) { if (obj == this) { return true; @@ -32146,12 +33949,10 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -32159,7 +33960,7 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -32187,46 +33988,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest 
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -32234,14 +34046,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.NormalizeRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.NormalizeRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -32254,27 +34067,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); return 
this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeRequest_descriptor; @@ -32298,6 +34104,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest)other); @@ -32309,7 +34141,8 @@ public final class MasterProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -32326,7 +34159,7 @@ public final class MasterProtos { parsedMessage = 
PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -32334,22 +34167,59 @@ public final class MasterProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.NormalizeRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.NormalizeRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest DEFAULT_INSTANCE; static { - defaultInstance = new NormalizeRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public NormalizeRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new NormalizeRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.NormalizeRequest) } - public interface NormalizeResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface NormalizeResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.NormalizeResponse) + com.google.protobuf.MessageOrBuilder { - // required bool normalizer_ran = 1; /** * required bool normalizer_ran = 1; */ @@ -32362,36 +34232,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.NormalizeResponse} */ - public static final class NormalizeResponse extends - com.google.protobuf.GeneratedMessage - implements NormalizeResponseOrBuilder { + public static final class NormalizeResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.NormalizeResponse) + NormalizeResponseOrBuilder { // Use NormalizeResponse.newBuilder() to construct. 
- private NormalizeResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private NormalizeResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private NormalizeResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final NormalizeResponse defaultInstance; - public static NormalizeResponse getDefaultInstance() { - return defaultInstance; } - - public NormalizeResponse getDefaultInstanceForType() { - return defaultInstance; + private NormalizeResponse() { + normalizerRan_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private NormalizeResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -32421,7 +34283,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -32432,30 +34294,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public NormalizeResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new NormalizeResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bool normalizer_ran = 1; public static final int NORMALIZER_RAN_FIELD_NUMBER = 1; private boolean normalizerRan_; /** @@ -32471,13 +34317,11 @@ public final class MasterProtos { return normalizerRan_; } - private void initFields() { - normalizerRan_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasNormalizerRan()) { memoizedIsInitialized = 0; @@ -32489,16 +34333,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, normalizerRan_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -32506,19 +34348,13 @@ public final 
class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, normalizerRan_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -32534,12 +34370,10 @@ public final class MasterProtos { result = result && (getNormalizerRan() == other.getNormalizerRan()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -32549,9 +34383,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasNormalizerRan()) { hash = (37 * hash) + NORMALIZER_RAN_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getNormalizerRan()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getNormalizerRan()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -32579,46 +34414,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse prototype) { - return newBuilder().mergeFrom(prototype); + 
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -32626,14 +34472,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.NormalizeResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.NormalizeResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -32646,18 +34493,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + 
.alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); normalizerRan_ = false; @@ -32665,10 +34509,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeResponse_descriptor; @@ -32699,6 +34539,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse)other); @@ -32713,13 +34579,13 @@ public final class MasterProtos { if (other.hasNormalizerRan()) { setNormalizerRan(other.getNormalizerRan()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final 
boolean isInitialized() { if (!hasNormalizerRan()) { - return false; } return true; @@ -32734,7 +34600,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -32744,7 +34610,6 @@ public final class MasterProtos { } private int bitField0_; - // required bool normalizer_ran = 1; private boolean normalizerRan_ ; /** * required bool normalizer_ran = 1; @@ -32776,22 +34641,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.NormalizeResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.NormalizeResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse DEFAULT_INSTANCE; static { - defaultInstance = new NormalizeResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public NormalizeResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new NormalizeResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.NormalizeResponse) } - public interface SetNormalizerRunningRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface SetNormalizerRunningRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SetNormalizerRunningRequest) + com.google.protobuf.MessageOrBuilder { - // required bool on = 1; /** * required bool on = 1; */ @@ -32804,36 +34706,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.SetNormalizerRunningRequest} */ - public static final class SetNormalizerRunningRequest extends - com.google.protobuf.GeneratedMessage - implements SetNormalizerRunningRequestOrBuilder { + public static final class SetNormalizerRunningRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.SetNormalizerRunningRequest) + SetNormalizerRunningRequestOrBuilder { // Use SetNormalizerRunningRequest.newBuilder() to construct. 
- private SetNormalizerRunningRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private SetNormalizerRunningRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private SetNormalizerRunningRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SetNormalizerRunningRequest defaultInstance; - public static SetNormalizerRunningRequest getDefaultInstance() { - return defaultInstance; } - - public SetNormalizerRunningRequest getDefaultInstanceForType() { - return defaultInstance; + private SetNormalizerRunningRequest() { + on_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private SetNormalizerRunningRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -32863,7 +34757,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -32874,30 +34768,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SetNormalizerRunningRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SetNormalizerRunningRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bool on = 1; public static final int ON_FIELD_NUMBER = 1; private boolean on_; /** @@ -32913,13 +34791,11 @@ public final class MasterProtos { return on_; } - private void initFields() { - on_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasOn()) { memoizedIsInitialized = 0; @@ -32931,16 +34807,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, on_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -32948,19 +34822,13 @@ 
public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, on_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -32976,12 +34844,10 @@ public final class MasterProtos { result = result && (getOn() == other.getOn()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -32991,9 +34857,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasOn()) { hash = (37 * hash) + ON_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getOn()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getOn()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -33021,46 +34888,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return 
PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest prototype) { - return 
newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -33068,14 +34946,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.SetNormalizerRunningRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SetNormalizerRunningRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -33088,18 +34967,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if 
(com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); on_ = false; @@ -33107,10 +34983,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningRequest_descriptor; @@ -33141,6 +35013,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest)other); @@ -33155,13 +35053,13 @@ public final class MasterProtos { if (other.hasOn()) { setOn(other.getOn()); } - 
this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasOn()) { - return false; } return true; @@ -33176,7 +35074,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -33186,7 +35084,6 @@ public final class MasterProtos { } private int bitField0_; - // required bool on = 1; private boolean on_ ; /** * required bool on = 1; @@ -33218,22 +35115,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.SetNormalizerRunningRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.SetNormalizerRunningRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest DEFAULT_INSTANCE; static { - defaultInstance = new SetNormalizerRunningRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new 
com.google.protobuf.AbstractParser() { + public SetNormalizerRunningRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SetNormalizerRunningRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.SetNormalizerRunningRequest) } - public interface SetNormalizerRunningResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface SetNormalizerRunningResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SetNormalizerRunningResponse) + com.google.protobuf.MessageOrBuilder { - // optional bool prev_normalizer_value = 1; /** * optional bool prev_normalizer_value = 1; */ @@ -33246,36 +35180,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.SetNormalizerRunningResponse} */ - public static final class SetNormalizerRunningResponse extends - com.google.protobuf.GeneratedMessage - implements SetNormalizerRunningResponseOrBuilder { + public static final class SetNormalizerRunningResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.SetNormalizerRunningResponse) + SetNormalizerRunningResponseOrBuilder { // Use SetNormalizerRunningResponse.newBuilder() to construct. 
- private SetNormalizerRunningResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private SetNormalizerRunningResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private SetNormalizerRunningResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SetNormalizerRunningResponse defaultInstance; - public static SetNormalizerRunningResponse getDefaultInstance() { - return defaultInstance; } - - public SetNormalizerRunningResponse getDefaultInstanceForType() { - return defaultInstance; + private SetNormalizerRunningResponse() { + prevNormalizerValue_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private SetNormalizerRunningResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -33305,7 +35231,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -33316,30 +35242,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SetNormalizerRunningResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SetNormalizerRunningResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional bool prev_normalizer_value = 1; public static final int PREV_NORMALIZER_VALUE_FIELD_NUMBER = 1; private boolean prevNormalizerValue_; /** @@ -33355,13 +35265,11 @@ public final class MasterProtos { return prevNormalizerValue_; } - private void initFields() { - prevNormalizerValue_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -33369,16 +35277,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, prevNormalizerValue_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = 
-1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -33386,19 +35292,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, prevNormalizerValue_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -33414,12 +35314,10 @@ public final class MasterProtos { result = result && (getPrevNormalizerValue() == other.getPrevNormalizerValue()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -33429,9 +35327,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasPrevNormalizerValue()) { hash = (37 * hash) + PREV_NORMALIZER_VALUE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getPrevNormalizerValue()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getPrevNormalizerValue()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -33459,46 +35358,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder 
newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -33506,14 +35416,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.SetNormalizerRunningResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SetNormalizerRunningResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -33526,18 +35437,15 @@ public final class MasterProtos 
{ } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); prevNormalizerValue_ = false; @@ -33545,10 +35453,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningResponse_descriptor; @@ -33579,6 +35483,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponse) 
{ return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponse)other); @@ -33593,7 +35523,8 @@ public final class MasterProtos { if (other.hasPrevNormalizerValue()) { setPrevNormalizerValue(other.getPrevNormalizerValue()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -33610,7 +35541,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -33620,7 +35551,6 @@ public final class MasterProtos { } private int bitField0_; - // optional bool prev_normalizer_value = 1; private boolean prevNormalizerValue_ ; /** * optional bool prev_normalizer_value = 1; @@ -33652,54 +35582,83 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.SetNormalizerRunningResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.SetNormalizerRunningResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponse DEFAULT_INSTANCE; static { - defaultInstance = new SetNormalizerRunningResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponse(); + } + + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public SetNormalizerRunningResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SetNormalizerRunningResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.SetNormalizerRunningResponse) } - public interface IsNormalizerEnabledRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface IsNormalizerEnabledRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.IsNormalizerEnabledRequest) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.IsNormalizerEnabledRequest} */ - public static final class IsNormalizerEnabledRequest extends - com.google.protobuf.GeneratedMessage - implements IsNormalizerEnabledRequestOrBuilder { + public static final class IsNormalizerEnabledRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.IsNormalizerEnabledRequest) + IsNormalizerEnabledRequestOrBuilder { // Use IsNormalizerEnabledRequest.newBuilder() to construct. 
- private IsNormalizerEnabledRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private IsNormalizerEnabledRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private IsNormalizerEnabledRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final IsNormalizerEnabledRequest defaultInstance; - public static IsNormalizerEnabledRequest getDefaultInstance() { - return defaultInstance; + private IsNormalizerEnabledRequest() { } - public IsNormalizerEnabledRequest getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private IsNormalizerEnabledRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -33723,7 +35682,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -33734,34 +35693,18 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public IsNormalizerEnabledRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new IsNormalizerEnabledRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -33769,29 +35712,21 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - 
@java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -33802,12 +35737,10 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -33815,7 +35748,7 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -33843,46 +35776,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public 
static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -33890,14 +35834,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.IsNormalizerEnabledRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.IsNormalizerEnabledRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -33910,27 +35855,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new 
Builder(); - } - public Builder clear() { super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledRequest_descriptor; @@ -33954,6 +35892,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest)other); @@ -33965,7 +35929,8 @@ public final class MasterProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); 
+ onChanged(); return this; } @@ -33982,7 +35947,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -33990,22 +35955,59 @@ public final class MasterProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.IsNormalizerEnabledRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.IsNormalizerEnabledRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest DEFAULT_INSTANCE; static { - defaultInstance = new IsNormalizerEnabledRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public IsNormalizerEnabledRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new IsNormalizerEnabledRequest(input, extensionRegistry); + } + }; + + public static 
com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.IsNormalizerEnabledRequest) } - public interface IsNormalizerEnabledResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface IsNormalizerEnabledResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.IsNormalizerEnabledResponse) + com.google.protobuf.MessageOrBuilder { - // required bool enabled = 1; /** * required bool enabled = 1; */ @@ -34018,36 +36020,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.IsNormalizerEnabledResponse} */ - public static final class IsNormalizerEnabledResponse extends - com.google.protobuf.GeneratedMessage - implements IsNormalizerEnabledResponseOrBuilder { + public static final class IsNormalizerEnabledResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.IsNormalizerEnabledResponse) + IsNormalizerEnabledResponseOrBuilder { // Use IsNormalizerEnabledResponse.newBuilder() to construct. 
- private IsNormalizerEnabledResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private IsNormalizerEnabledResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private IsNormalizerEnabledResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final IsNormalizerEnabledResponse defaultInstance; - public static IsNormalizerEnabledResponse getDefaultInstance() { - return defaultInstance; - } - - public IsNormalizerEnabledResponse getDefaultInstanceForType() { - return defaultInstance; + private IsNormalizerEnabledResponse() { + enabled_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private IsNormalizerEnabledResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -34077,7 +36071,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -34088,30 +36082,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public IsNormalizerEnabledResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new IsNormalizerEnabledResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bool enabled = 1; public static final int ENABLED_FIELD_NUMBER = 1; private boolean enabled_; /** @@ -34127,13 +36105,11 @@ public final class MasterProtos { return enabled_; } - private void initFields() { - enabled_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasEnabled()) { memoizedIsInitialized = 0; @@ -34145,16 +36121,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, enabled_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; 
size = 0; @@ -34162,19 +36136,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, enabled_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -34190,12 +36158,10 @@ public final class MasterProtos { result = result && (getEnabled() == other.getEnabled()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -34205,9 +36171,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasEnabled()) { hash = (37 * hash) + ENABLED_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getEnabled()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getEnabled()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -34235,46 +36202,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder 
newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -34282,14 +36260,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.IsNormalizerEnabledResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.IsNormalizerEnabledResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -34302,18 +36281,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { 
super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); enabled_ = false; @@ -34321,10 +36297,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledResponse_descriptor; @@ -34355,6 +36327,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse)other); @@ -34369,13 +36367,13 @@ public final 
class MasterProtos { if (other.hasEnabled()) { setEnabled(other.getEnabled()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasEnabled()) { - return false; } return true; @@ -34390,7 +36388,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -34400,7 +36398,6 @@ public final class MasterProtos { } private int bitField0_; - // required bool enabled = 1; private boolean enabled_ ; /** * required bool enabled = 1; @@ -34432,54 +36429,83 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.IsNormalizerEnabledResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.IsNormalizerEnabledResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse DEFAULT_INSTANCE; static { - defaultInstance = new IsNormalizerEnabledResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + 
+ @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public IsNormalizerEnabledResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new IsNormalizerEnabledResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.IsNormalizerEnabledResponse) } - public interface RunCatalogScanRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RunCatalogScanRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RunCatalogScanRequest) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.RunCatalogScanRequest} */ - public static final class RunCatalogScanRequest extends - com.google.protobuf.GeneratedMessage - implements RunCatalogScanRequestOrBuilder { + public static final class RunCatalogScanRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RunCatalogScanRequest) + RunCatalogScanRequestOrBuilder { // Use RunCatalogScanRequest.newBuilder() to construct. 
- private RunCatalogScanRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private RunCatalogScanRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private RunCatalogScanRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RunCatalogScanRequest defaultInstance; - public static RunCatalogScanRequest getDefaultInstance() { - return defaultInstance; + private RunCatalogScanRequest() { } - public RunCatalogScanRequest getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RunCatalogScanRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -34503,7 +36529,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -34514,34 +36540,18 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RunCatalogScanRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RunCatalogScanRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RunCatalogScanRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RunCatalogScanRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -34549,29 +36559,21 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public 
boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -34582,12 +36584,10 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -34595,7 +36595,7 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -34623,46 +36623,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -34670,14 +36681,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.RunCatalogScanRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RunCatalogScanRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RunCatalogScanRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RunCatalogScanRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -34690,27 +36702,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder 
clear() { super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RunCatalogScanRequest_descriptor; @@ -34734,6 +36739,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest)other); @@ -34745,7 +36776,8 @@ public final class MasterProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -34762,7 +36794,7 @@ 
public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -34770,22 +36802,59 @@ public final class MasterProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RunCatalogScanRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.RunCatalogScanRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest DEFAULT_INSTANCE; static { - defaultInstance = new RunCatalogScanRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RunCatalogScanRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RunCatalogScanRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser 
getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.RunCatalogScanRequest) } - public interface RunCatalogScanResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RunCatalogScanResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RunCatalogScanResponse) + com.google.protobuf.MessageOrBuilder { - // optional int32 scan_result = 1; /** * optional int32 scan_result = 1; */ @@ -34798,36 +36867,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.RunCatalogScanResponse} */ - public static final class RunCatalogScanResponse extends - com.google.protobuf.GeneratedMessage - implements RunCatalogScanResponseOrBuilder { + public static final class RunCatalogScanResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RunCatalogScanResponse) + RunCatalogScanResponseOrBuilder { // Use RunCatalogScanResponse.newBuilder() to construct. 
- private RunCatalogScanResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private RunCatalogScanResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private RunCatalogScanResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RunCatalogScanResponse defaultInstance; - public static RunCatalogScanResponse getDefaultInstance() { - return defaultInstance; - } - - public RunCatalogScanResponse getDefaultInstanceForType() { - return defaultInstance; + private RunCatalogScanResponse() { + scanResult_ = 0; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RunCatalogScanResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -34857,7 +36918,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -34868,30 +36929,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RunCatalogScanResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RunCatalogScanResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RunCatalogScanResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RunCatalogScanResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional int32 scan_result = 1; public static final int SCAN_RESULT_FIELD_NUMBER = 1; private int scanResult_; /** @@ -34907,13 +36952,11 @@ public final class MasterProtos { return scanResult_; } - private void initFields() { - scanResult_ = 0; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -34921,16 +36964,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeInt32(1, scanResult_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -34938,19 +36979,13 @@ public final class 
MasterProtos { size += com.google.protobuf.CodedOutputStream .computeInt32Size(1, scanResult_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -34966,12 +37001,10 @@ public final class MasterProtos { result = result && (getScanResult() == other.getScanResult()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -34983,7 +37016,7 @@ public final class MasterProtos { hash = (37 * hash) + SCAN_RESULT_FIELD_NUMBER; hash = (53 * hash) + getScanResult(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -35011,46 +37044,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -35058,14 +37102,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.RunCatalogScanResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RunCatalogScanResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RunCatalogScanResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RunCatalogScanResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -35078,18 +37123,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder 
clear() { super.clear(); scanResult_ = 0; @@ -35097,10 +37139,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RunCatalogScanResponse_descriptor; @@ -35131,6 +37169,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse)other); @@ -35145,7 +37209,8 @@ public final class MasterProtos { if (other.hasScanResult()) { setScanResult(other.getScanResult()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -35162,7 +37227,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } 
catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -35172,7 +37237,6 @@ public final class MasterProtos { } private int bitField0_; - // optional int32 scan_result = 1; private int scanResult_ ; /** * optional int32 scan_result = 1; @@ -35204,22 +37268,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RunCatalogScanResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.RunCatalogScanResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse DEFAULT_INSTANCE; static { - defaultInstance = new RunCatalogScanResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RunCatalogScanResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RunCatalogScanResponse(input, extensionRegistry); + } + }; + + public static 
com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.RunCatalogScanResponse) } - public interface EnableCatalogJanitorRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface EnableCatalogJanitorRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.EnableCatalogJanitorRequest) + com.google.protobuf.MessageOrBuilder { - // required bool enable = 1; /** * required bool enable = 1; */ @@ -35232,36 +37333,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.EnableCatalogJanitorRequest} */ - public static final class EnableCatalogJanitorRequest extends - com.google.protobuf.GeneratedMessage - implements EnableCatalogJanitorRequestOrBuilder { + public static final class EnableCatalogJanitorRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.EnableCatalogJanitorRequest) + EnableCatalogJanitorRequestOrBuilder { // Use EnableCatalogJanitorRequest.newBuilder() to construct. 
- private EnableCatalogJanitorRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private EnableCatalogJanitorRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private EnableCatalogJanitorRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final EnableCatalogJanitorRequest defaultInstance; - public static EnableCatalogJanitorRequest getDefaultInstance() { - return defaultInstance; - } - - public EnableCatalogJanitorRequest getDefaultInstanceForType() { - return defaultInstance; + private EnableCatalogJanitorRequest() { + enable_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private EnableCatalogJanitorRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -35291,7 +37384,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -35302,30 +37395,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_EnableCatalogJanitorRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_EnableCatalogJanitorRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public EnableCatalogJanitorRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new EnableCatalogJanitorRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bool enable = 1; public static final int ENABLE_FIELD_NUMBER = 1; private boolean enable_; /** @@ -35341,13 +37418,11 @@ public final class MasterProtos { return enable_; } - private void initFields() { - enable_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasEnable()) { memoizedIsInitialized = 0; @@ -35359,16 +37434,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, enable_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; 
@@ -35376,19 +37449,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, enable_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -35404,12 +37471,10 @@ public final class MasterProtos { result = result && (getEnable() == other.getEnable()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -35419,9 +37484,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasEnable()) { hash = (37 * hash) + ENABLE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getEnable()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getEnable()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -35449,46 +37515,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest 
prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -35496,14 +37573,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.EnableCatalogJanitorRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.EnableCatalogJanitorRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_EnableCatalogJanitorRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_EnableCatalogJanitorRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -35516,18 +37594,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if 
(com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); enable_ = false; @@ -35535,10 +37610,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_EnableCatalogJanitorRequest_descriptor; @@ -35569,6 +37640,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest)other); @@ -35583,13 +37680,13 @@ public final class MasterProtos { if (other.hasEnable()) { setEnable(other.getEnable()); } - 
this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasEnable()) { - return false; } return true; @@ -35604,7 +37701,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -35614,7 +37711,6 @@ public final class MasterProtos { } private int bitField0_; - // required bool enable = 1; private boolean enable_ ; /** * required bool enable = 1; @@ -35646,22 +37742,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.EnableCatalogJanitorRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.EnableCatalogJanitorRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest DEFAULT_INSTANCE; static { - defaultInstance = new EnableCatalogJanitorRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new 
com.google.protobuf.AbstractParser() { + public EnableCatalogJanitorRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new EnableCatalogJanitorRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.EnableCatalogJanitorRequest) } - public interface EnableCatalogJanitorResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface EnableCatalogJanitorResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.EnableCatalogJanitorResponse) + com.google.protobuf.MessageOrBuilder { - // optional bool prev_value = 1; /** * optional bool prev_value = 1; */ @@ -35674,36 +37807,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.EnableCatalogJanitorResponse} */ - public static final class EnableCatalogJanitorResponse extends - com.google.protobuf.GeneratedMessage - implements EnableCatalogJanitorResponseOrBuilder { + public static final class EnableCatalogJanitorResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.EnableCatalogJanitorResponse) + EnableCatalogJanitorResponseOrBuilder { // Use EnableCatalogJanitorResponse.newBuilder() to construct. 
- private EnableCatalogJanitorResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private EnableCatalogJanitorResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private EnableCatalogJanitorResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final EnableCatalogJanitorResponse defaultInstance; - public static EnableCatalogJanitorResponse getDefaultInstance() { - return defaultInstance; - } - - public EnableCatalogJanitorResponse getDefaultInstanceForType() { - return defaultInstance; + private EnableCatalogJanitorResponse() { + prevValue_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private EnableCatalogJanitorResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -35733,7 +37858,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -35744,30 +37869,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_EnableCatalogJanitorResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_EnableCatalogJanitorResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public EnableCatalogJanitorResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new EnableCatalogJanitorResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional bool prev_value = 1; public static final int PREV_VALUE_FIELD_NUMBER = 1; private boolean prevValue_; /** @@ -35783,13 +37892,11 @@ public final class MasterProtos { return prevValue_; } - private void initFields() { - prevValue_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -35797,16 +37904,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, prevValue_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) 
return size; size = 0; @@ -35814,19 +37919,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, prevValue_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -35842,12 +37941,10 @@ public final class MasterProtos { result = result && (getPrevValue() == other.getPrevValue()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -35857,9 +37954,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasPrevValue()) { hash = (37 * hash) + PREV_VALUE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getPrevValue()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getPrevValue()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -35887,46 +37985,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parseFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder 
newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -35934,14 +38043,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.EnableCatalogJanitorResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.EnableCatalogJanitorResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_EnableCatalogJanitorResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_EnableCatalogJanitorResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -35954,18 +38064,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { 
super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); prevValue_ = false; @@ -35973,10 +38080,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_EnableCatalogJanitorResponse_descriptor; @@ -36007,6 +38110,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse)other); @@ -36021,7 +38150,8 @@ public final 
class MasterProtos { if (other.hasPrevValue()) { setPrevValue(other.getPrevValue()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -36038,7 +38168,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -36048,7 +38178,6 @@ public final class MasterProtos { } private int bitField0_; - // optional bool prev_value = 1; private boolean prevValue_ ; /** * optional bool prev_value = 1; @@ -36080,54 +38209,83 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.EnableCatalogJanitorResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.EnableCatalogJanitorResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse DEFAULT_INSTANCE; static { - defaultInstance = new EnableCatalogJanitorResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser 
+ PARSER = new com.google.protobuf.AbstractParser() { + public EnableCatalogJanitorResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new EnableCatalogJanitorResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.EnableCatalogJanitorResponse) } - public interface IsCatalogJanitorEnabledRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface IsCatalogJanitorEnabledRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.IsCatalogJanitorEnabledRequest) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.IsCatalogJanitorEnabledRequest} */ - public static final class IsCatalogJanitorEnabledRequest extends - com.google.protobuf.GeneratedMessage - implements IsCatalogJanitorEnabledRequestOrBuilder { + public static final class IsCatalogJanitorEnabledRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.IsCatalogJanitorEnabledRequest) + IsCatalogJanitorEnabledRequestOrBuilder { // Use IsCatalogJanitorEnabledRequest.newBuilder() to construct. 
- private IsCatalogJanitorEnabledRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private IsCatalogJanitorEnabledRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private IsCatalogJanitorEnabledRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final IsCatalogJanitorEnabledRequest defaultInstance; - public static IsCatalogJanitorEnabledRequest getDefaultInstance() { - return defaultInstance; - } - - public IsCatalogJanitorEnabledRequest getDefaultInstanceForType() { - return defaultInstance; + private IsCatalogJanitorEnabledRequest() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private IsCatalogJanitorEnabledRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -36151,7 +38309,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -36162,34 +38320,18 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsCatalogJanitorEnabledRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsCatalogJanitorEnabledRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public IsCatalogJanitorEnabledRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new IsCatalogJanitorEnabledRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -36197,29 +38339,21 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws 
java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -36230,12 +38364,10 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -36243,7 +38375,7 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -36271,46 +38403,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return 
com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -36318,14 +38461,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.IsCatalogJanitorEnabledRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.IsCatalogJanitorEnabledRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsCatalogJanitorEnabledRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsCatalogJanitorEnabledRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -36338,27 +38482,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder 
create() { - return new Builder(); - } - public Builder clear() { super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsCatalogJanitorEnabledRequest_descriptor; @@ -36382,6 +38519,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest)other); @@ -36393,7 +38556,8 @@ public final class MasterProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + 
this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -36410,7 +38574,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -36418,22 +38582,59 @@ public final class MasterProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.IsCatalogJanitorEnabledRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.IsCatalogJanitorEnabledRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest DEFAULT_INSTANCE; static { - defaultInstance = new IsCatalogJanitorEnabledRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public IsCatalogJanitorEnabledRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new 
IsCatalogJanitorEnabledRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.IsCatalogJanitorEnabledRequest) } - public interface IsCatalogJanitorEnabledResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface IsCatalogJanitorEnabledResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.IsCatalogJanitorEnabledResponse) + com.google.protobuf.MessageOrBuilder { - // required bool value = 1; /** * required bool value = 1; */ @@ -36446,36 +38647,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.IsCatalogJanitorEnabledResponse} */ - public static final class IsCatalogJanitorEnabledResponse extends - com.google.protobuf.GeneratedMessage - implements IsCatalogJanitorEnabledResponseOrBuilder { + public static final class IsCatalogJanitorEnabledResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.IsCatalogJanitorEnabledResponse) + IsCatalogJanitorEnabledResponseOrBuilder { // Use IsCatalogJanitorEnabledResponse.newBuilder() to construct. 
- private IsCatalogJanitorEnabledResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private IsCatalogJanitorEnabledResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private IsCatalogJanitorEnabledResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final IsCatalogJanitorEnabledResponse defaultInstance; - public static IsCatalogJanitorEnabledResponse getDefaultInstance() { - return defaultInstance; } - - public IsCatalogJanitorEnabledResponse getDefaultInstanceForType() { - return defaultInstance; + private IsCatalogJanitorEnabledResponse() { + value_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private IsCatalogJanitorEnabledResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -36505,7 +38698,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -36516,30 +38709,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsCatalogJanitorEnabledResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsCatalogJanitorEnabledResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public IsCatalogJanitorEnabledResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new IsCatalogJanitorEnabledResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bool value = 1; public static final int VALUE_FIELD_NUMBER = 1; private boolean value_; /** @@ -36555,13 +38732,11 @@ public final class MasterProtos { return value_; } - private void initFields() { - value_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasValue()) { memoizedIsInitialized = 0; @@ -36573,16 +38748,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, value_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = 
memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -36590,19 +38763,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, value_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -36618,12 +38785,10 @@ public final class MasterProtos { result = result && (getValue() == other.getValue()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -36633,9 +38798,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasValue()) { hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getValue()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getValue()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -36663,46 +38829,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parseFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder 
newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -36710,14 +38887,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.IsCatalogJanitorEnabledResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.IsCatalogJanitorEnabledResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsCatalogJanitorEnabledResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsCatalogJanitorEnabledResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -36730,18 +38908,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + 
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); value_ = false; @@ -36749,10 +38924,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsCatalogJanitorEnabledResponse_descriptor; @@ -36783,6 +38954,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse) { return 
mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse)other); @@ -36797,13 +38994,13 @@ public final class MasterProtos { if (other.hasValue()) { setValue(other.getValue()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasValue()) { - return false; } return true; @@ -36818,7 +39015,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -36828,7 +39025,6 @@ public final class MasterProtos { } private int bitField0_; - // required bool value = 1; private boolean value_ ; /** * required bool value = 1; @@ -36860,22 +39056,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.IsCatalogJanitorEnabledResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.IsCatalogJanitorEnabledResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse DEFAULT_INSTANCE; static { - defaultInstance = new IsCatalogJanitorEnabledResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse(); + } + 
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public IsCatalogJanitorEnabledResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new IsCatalogJanitorEnabledResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.IsCatalogJanitorEnabledResponse) } - public interface SnapshotRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface SnapshotRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SnapshotRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.SnapshotDescription snapshot = 1; /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ @@ -36892,36 +39125,27 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.SnapshotRequest} */ - public static final class SnapshotRequest extends - com.google.protobuf.GeneratedMessage - implements SnapshotRequestOrBuilder { + public static final class SnapshotRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.SnapshotRequest) + SnapshotRequestOrBuilder { // Use SnapshotRequest.newBuilder() to construct. 
- private SnapshotRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private SnapshotRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private SnapshotRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SnapshotRequest defaultInstance; - public static SnapshotRequest getDefaultInstance() { - return defaultInstance; } - - public SnapshotRequest getDefaultInstanceForType() { - return defaultInstance; + private SnapshotRequest() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private SnapshotRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -36959,7 +39183,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -36970,30 +39194,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SnapshotRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SnapshotRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.SnapshotDescription snapshot = 1; public static final int SNAPSHOT_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_; /** @@ -37006,22 +39214,20 @@ public final class MasterProtos { * required .hbase.pb.SnapshotDescription snapshot = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { - return snapshot_; + return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { - return snapshot_; + return snapshot_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } - private void initFields() { - snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasSnapshot()) { memoizedIsInitialized = 0; @@ -37037,36 +39243,28 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, snapshot_); + output.writeMessage(1, getSnapshot()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, snapshot_); + .computeMessageSize(1, getSnapshot()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -37082,12 +39280,10 @@ public final class MasterProtos { result = result && getSnapshot() .equals(other.getSnapshot()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && 
unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -37099,7 +39295,7 @@ public final class MasterProtos { hash = (37 * hash) + SNAPSHOT_FIELD_NUMBER; hash = (53 * hash) + getSnapshot().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -37127,46 +39323,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -37174,14 +39381,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.SnapshotRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SnapshotRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -37194,23 +39402,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getSnapshotFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { 
super.clear(); if (snapshotBuilder_ == null) { - snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = null; } else { snapshotBuilder_.clear(); } @@ -37218,10 +39423,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotRequest_descriptor; @@ -37256,6 +39457,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest)other); @@ -37270,17 +39497,16 @@ public final class MasterProtos { if (other.hasSnapshot()) { mergeSnapshot(other.getSnapshot()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + 
onChanged(); return this; } public final boolean isInitialized() { if (!hasSnapshot()) { - return false; } if (!getSnapshot().isInitialized()) { - return false; } return true; @@ -37295,7 +39521,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -37305,9 +39531,8 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.SnapshotDescription snapshot = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; /** * required .hbase.pb.SnapshotDescription snapshot = 1; @@ -37320,7 +39545,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { if (snapshotBuilder_ == null) { - return snapshot_; + return snapshot_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } else { return snapshotBuilder_.getMessage(); } @@ -37361,6 +39586,7 @@ public final class MasterProtos { public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + snapshot_ != null && snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) { snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); @@ -37379,7 +39605,7 @@ public final class MasterProtos { */ public Builder clearSnapshot() { if (snapshotBuilder_ == null) { - snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = null; onChanged(); } else { snapshotBuilder_.clear(); @@ -37402,41 +39628,79 @@ public final class MasterProtos { if (snapshotBuilder_ != null) { return snapshotBuilder_.getMessageOrBuilder(); } else { - return snapshot_; + return snapshot_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } } /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> getSnapshotFieldBuilder() { if (snapshotBuilder_ == null) { - snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilder< + snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>( - snapshot_, + getSnapshot(), getParentForChildren(), isClean()); snapshot_ = null; } return snapshotBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest DEFAULT_INSTANCE; static { - defaultInstance = new SnapshotRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest 
getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public SnapshotRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SnapshotRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotRequest) } - public interface SnapshotResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface SnapshotResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SnapshotResponse) + com.google.protobuf.MessageOrBuilder { - // required int64 expected_timeout = 1; /** * required int64 expected_timeout = 1; */ @@ -37449,36 +39713,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.SnapshotResponse} */ - public static final class SnapshotResponse extends - com.google.protobuf.GeneratedMessage - implements SnapshotResponseOrBuilder { + public static final class SnapshotResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.SnapshotResponse) + SnapshotResponseOrBuilder { // Use SnapshotResponse.newBuilder() to construct. 
- private SnapshotResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private SnapshotResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private SnapshotResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SnapshotResponse defaultInstance; - public static SnapshotResponse getDefaultInstance() { - return defaultInstance; } - - public SnapshotResponse getDefaultInstanceForType() { - return defaultInstance; + private SnapshotResponse() { + expectedTimeout_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private SnapshotResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -37508,7 +39764,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -37519,30 +39775,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SnapshotResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SnapshotResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required int64 expected_timeout = 1; public static final int EXPECTED_TIMEOUT_FIELD_NUMBER = 1; private long expectedTimeout_; /** @@ -37558,13 +39798,11 @@ public final class MasterProtos { return expectedTimeout_; } - private void initFields() { - expectedTimeout_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasExpectedTimeout()) { memoizedIsInitialized = 0; @@ -37576,16 +39814,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeInt64(1, expectedTimeout_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -37593,19 +39829,13 @@ public 
final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeInt64Size(1, expectedTimeout_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -37621,12 +39851,10 @@ public final class MasterProtos { result = result && (getExpectedTimeout() == other.getExpectedTimeout()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -37636,9 +39864,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasExpectedTimeout()) { hash = (37 * hash) + EXPECTED_TIMEOUT_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getExpectedTimeout()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getExpectedTimeout()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -37666,46 +39895,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse prototype) { - return newBuilder().mergeFrom(prototype); + 
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -37713,14 +39953,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.SnapshotResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SnapshotResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -37733,18 +39974,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + 
.alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); expectedTimeout_ = 0L; @@ -37752,10 +39990,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotResponse_descriptor; @@ -37786,6 +40020,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse)other); @@ -37800,13 +40060,13 @@ public final class MasterProtos { if (other.hasExpectedTimeout()) { setExpectedTimeout(other.getExpectedTimeout()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final 
boolean isInitialized() { if (!hasExpectedTimeout()) { - return false; } return true; @@ -37821,7 +40081,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -37831,7 +40091,6 @@ public final class MasterProtos { } private int bitField0_; - // required int64 expected_timeout = 1; private long expectedTimeout_ ; /** * required int64 expected_timeout = 1; @@ -37863,54 +40122,83 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse DEFAULT_INSTANCE; static { - defaultInstance = new SnapshotResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public SnapshotResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SnapshotResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotResponse) } - public interface GetCompletedSnapshotsRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface GetCompletedSnapshotsRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GetCompletedSnapshotsRequest) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.GetCompletedSnapshotsRequest} */ - public static final class GetCompletedSnapshotsRequest extends - com.google.protobuf.GeneratedMessage - implements GetCompletedSnapshotsRequestOrBuilder { + public static final class GetCompletedSnapshotsRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetCompletedSnapshotsRequest) + GetCompletedSnapshotsRequestOrBuilder { // Use GetCompletedSnapshotsRequest.newBuilder() to construct. 
- private GetCompletedSnapshotsRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetCompletedSnapshotsRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private GetCompletedSnapshotsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final GetCompletedSnapshotsRequest defaultInstance; - public static GetCompletedSnapshotsRequest getDefaultInstance() { - return defaultInstance; - } - - public GetCompletedSnapshotsRequest getDefaultInstanceForType() { - return defaultInstance; + private GetCompletedSnapshotsRequest() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private GetCompletedSnapshotsRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -37934,7 +40222,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -37945,34 +40233,18 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCompletedSnapshotsRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCompletedSnapshotsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetCompletedSnapshotsRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetCompletedSnapshotsRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -37980,29 +40252,21 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - 
} - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -38013,12 +40277,10 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -38026,7 +40288,7 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -38054,46 +40316,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, 
input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -38101,14 +40374,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.GetCompletedSnapshotsRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetCompletedSnapshotsRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCompletedSnapshotsRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCompletedSnapshotsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -38121,27 +40395,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return 
new Builder(); - } - public Builder clear() { super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCompletedSnapshotsRequest_descriptor; @@ -38165,6 +40432,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest)other); @@ -38176,7 +40469,8 @@ public final class MasterProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + 
this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -38193,7 +40487,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -38201,22 +40495,59 @@ public final class MasterProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.GetCompletedSnapshotsRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.GetCompletedSnapshotsRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest DEFAULT_INSTANCE; static { - defaultInstance = new GetCompletedSnapshotsRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GetCompletedSnapshotsRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetCompletedSnapshotsRequest(input, 
extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.GetCompletedSnapshotsRequest) } - public interface GetCompletedSnapshotsResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface GetCompletedSnapshotsResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GetCompletedSnapshotsResponse) + com.google.protobuf.MessageOrBuilder { - // repeated .hbase.pb.SnapshotDescription snapshots = 1; /** * repeated .hbase.pb.SnapshotDescription snapshots = 1; */ @@ -38244,36 +40575,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.GetCompletedSnapshotsResponse} */ - public static final class GetCompletedSnapshotsResponse extends - com.google.protobuf.GeneratedMessage - implements GetCompletedSnapshotsResponseOrBuilder { + public static final class GetCompletedSnapshotsResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetCompletedSnapshotsResponse) + GetCompletedSnapshotsResponseOrBuilder { // Use GetCompletedSnapshotsResponse.newBuilder() to construct. 
- private GetCompletedSnapshotsResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetCompletedSnapshotsResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private GetCompletedSnapshotsResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final GetCompletedSnapshotsResponse defaultInstance; - public static GetCompletedSnapshotsResponse getDefaultInstance() { - return defaultInstance; } - - public GetCompletedSnapshotsResponse getDefaultInstanceForType() { - return defaultInstance; + private GetCompletedSnapshotsResponse() { + snapshots_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private GetCompletedSnapshotsResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -38297,7 +40620,8 @@ public final class MasterProtos { snapshots_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } - snapshots_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry)); + snapshots_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry)); break; } } @@ -38306,7 +40630,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - 
e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { snapshots_ = java.util.Collections.unmodifiableList(snapshots_); @@ -38320,29 +40644,13 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCompletedSnapshotsResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCompletedSnapshotsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetCompletedSnapshotsResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetCompletedSnapshotsResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - // repeated .hbase.pb.SnapshotDescription snapshots = 1; public static final int SNAPSHOTS_FIELD_NUMBER = 1; private java.util.List snapshots_; /** @@ -38378,13 +40686,11 @@ public final class MasterProtos { return snapshots_.get(index); } - private void initFields() { - snapshots_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if 
(isInitialized == 1) return true; + if (isInitialized == 0) return false; for (int i = 0; i < getSnapshotsCount(); i++) { if (!getSnapshots(i).isInitialized()) { @@ -38398,16 +40704,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < snapshots_.size(); i++) { output.writeMessage(1, snapshots_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -38415,19 +40719,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, snapshots_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -38440,12 +40738,10 @@ public final class MasterProtos { boolean result = true; result = result && getSnapshotsList() .equals(other.getSnapshotsList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -38457,7 +40753,7 @@ public final class MasterProtos { hash = (37 * hash) + SNAPSHOTS_FIELD_NUMBER; hash = (53 * hash) + getSnapshotsList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = 
hash; return hash; } @@ -38485,46 +40781,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parseFrom( com.google.protobuf.CodedInputStream 
input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -38532,14 +40839,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.GetCompletedSnapshotsResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetCompletedSnapshotsResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCompletedSnapshotsResponse_descriptor; } - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCompletedSnapshotsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -38552,19 +40860,16 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getSnapshotsFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (snapshotsBuilder_ == null) { @@ -38576,10 +40881,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCompletedSnapshotsResponse_descriptor; @@ -38613,6 +40914,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, 
Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse)other); @@ -38643,21 +40970,21 @@ public final class MasterProtos { snapshots_ = other.snapshots_; bitField0_ = (bitField0_ & ~0x00000001); snapshotsBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getSnapshotsFieldBuilder() : null; } else { snapshotsBuilder_.addAllMessages(other.snapshots_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getSnapshotsCount(); i++) { if (!getSnapshots(i).isInitialized()) { - return false; } } @@ -38673,7 +41000,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -38683,7 +41010,6 @@ public final class MasterProtos { } private int bitField0_; - // repeated .hbase.pb.SnapshotDescription snapshots = 1; private java.util.List snapshots_ = java.util.Collections.emptyList(); private void ensureSnapshotsIsMutable() { @@ -38693,7 +41019,7 @@ public final class MasterProtos { } } - private 
com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotsBuilder_; /** @@ -38825,7 +41151,8 @@ public final class MasterProtos { java.lang.Iterable values) { if (snapshotsBuilder_ == null) { ensureSnapshotsIsMutable(); - super.addAll(values, snapshots_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, snapshots_); onChanged(); } else { snapshotsBuilder_.addAllMessages(values); @@ -38908,11 +41235,11 @@ public final class MasterProtos { getSnapshotsBuilderList() { return getSnapshotsFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> getSnapshotsFieldBuilder() { if (snapshotsBuilder_ == null) { - snapshotsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + snapshotsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>( snapshots_, ((bitField0_ & 0x00000001) == 0x00000001), @@ -38922,22 +41249,59 @@ public final class MasterProtos { } return snapshotsBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + 
public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.GetCompletedSnapshotsResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.GetCompletedSnapshotsResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse DEFAULT_INSTANCE; static { - defaultInstance = new GetCompletedSnapshotsResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GetCompletedSnapshotsResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetCompletedSnapshotsResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.GetCompletedSnapshotsResponse) } - public interface DeleteSnapshotRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface DeleteSnapshotRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.DeleteSnapshotRequest) + com.google.protobuf.MessageOrBuilder { - // 
required .hbase.pb.SnapshotDescription snapshot = 1; /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ @@ -38954,36 +41318,27 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.DeleteSnapshotRequest} */ - public static final class DeleteSnapshotRequest extends - com.google.protobuf.GeneratedMessage - implements DeleteSnapshotRequestOrBuilder { + public static final class DeleteSnapshotRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.DeleteSnapshotRequest) + DeleteSnapshotRequestOrBuilder { // Use DeleteSnapshotRequest.newBuilder() to construct. - private DeleteSnapshotRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private DeleteSnapshotRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private DeleteSnapshotRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final DeleteSnapshotRequest defaultInstance; - public static DeleteSnapshotRequest getDefaultInstance() { - return defaultInstance; - } - - public DeleteSnapshotRequest getDefaultInstanceForType() { - return defaultInstance; + private DeleteSnapshotRequest() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private DeleteSnapshotRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -39021,7 +41376,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch 
(java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -39032,30 +41387,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteSnapshotRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteSnapshotRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public DeleteSnapshotRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new DeleteSnapshotRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.SnapshotDescription snapshot = 1; public static final int SNAPSHOT_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_; /** @@ -39068,22 +41407,20 @@ public final class MasterProtos { * required .hbase.pb.SnapshotDescription snapshot = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { - return snapshot_; + return snapshot_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { - return snapshot_; + return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } - private void initFields() { - snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasSnapshot()) { memoizedIsInitialized = 0; @@ -39099,36 +41436,28 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, snapshot_); + output.writeMessage(1, getSnapshot()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, snapshot_); + .computeMessageSize(1, getSnapshot()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return 
super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -39144,12 +41473,10 @@ public final class MasterProtos { result = result && getSnapshot() .equals(other.getSnapshot()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -39161,7 +41488,7 @@ public final class MasterProtos { hash = (37 * hash) + SNAPSHOT_FIELD_NUMBER; hash = (53 * hash) + getSnapshot().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -39189,46 +41516,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest parseDelimitedFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -39236,14 +41574,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.DeleteSnapshotRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.DeleteSnapshotRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteSnapshotRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteSnapshotRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -39256,23 +41595,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getSnapshotFieldBuilder(); } } - private static Builder create() { - return new Builder(); 
- } - public Builder clear() { super.clear(); if (snapshotBuilder_ == null) { - snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = null; } else { snapshotBuilder_.clear(); } @@ -39280,10 +41616,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteSnapshotRequest_descriptor; @@ -39318,6 +41650,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest)other); @@ -39332,17 +41690,16 @@ public final class MasterProtos { if (other.hasSnapshot()) { mergeSnapshot(other.getSnapshot()); } - this.mergeUnknownFields(other.getUnknownFields()); + 
this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasSnapshot()) { - return false; } if (!getSnapshot().isInitialized()) { - return false; } return true; @@ -39357,7 +41714,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -39367,9 +41724,8 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.SnapshotDescription snapshot = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; /** * required .hbase.pb.SnapshotDescription snapshot = 1; @@ -39382,7 +41738,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { if (snapshotBuilder_ == null) { - return snapshot_; + return snapshot_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } else { return snapshotBuilder_.getMessage(); } @@ -39423,6 +41779,7 @@ public final class MasterProtos { public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + snapshot_ != null && snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) { snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); @@ -39441,7 +41798,7 @@ public final class MasterProtos { */ public Builder clearSnapshot() { if (snapshotBuilder_ == null) { - snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = null; onChanged(); } else { snapshotBuilder_.clear(); @@ -39464,73 +41821,103 @@ public final class MasterProtos { if (snapshotBuilder_ != null) { return snapshotBuilder_.getMessageOrBuilder(); } else { - return snapshot_; + return snapshot_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } } /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> getSnapshotFieldBuilder() { if (snapshotBuilder_ == null) { - snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilder< + snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>( - snapshot_, + getSnapshot(), getParentForChildren(), isClean()); snapshot_ = null; } return snapshotBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.DeleteSnapshotRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.DeleteSnapshotRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest DEFAULT_INSTANCE; static { - defaultInstance = new DeleteSnapshotRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest(); + } + + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public DeleteSnapshotRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DeleteSnapshotRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.DeleteSnapshotRequest) } - public interface DeleteSnapshotResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface DeleteSnapshotResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.DeleteSnapshotResponse) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.DeleteSnapshotResponse} */ - public static final class DeleteSnapshotResponse extends - com.google.protobuf.GeneratedMessage - implements DeleteSnapshotResponseOrBuilder { + public static final class DeleteSnapshotResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.DeleteSnapshotResponse) + DeleteSnapshotResponseOrBuilder { // Use DeleteSnapshotResponse.newBuilder() to construct. 
- private DeleteSnapshotResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private DeleteSnapshotResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private DeleteSnapshotResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final DeleteSnapshotResponse defaultInstance; - public static DeleteSnapshotResponse getDefaultInstance() { - return defaultInstance; - } - - public DeleteSnapshotResponse getDefaultInstanceForType() { - return defaultInstance; + private DeleteSnapshotResponse() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private DeleteSnapshotResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -39554,7 +41941,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -39565,34 +41952,18 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteSnapshotResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteSnapshotResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public DeleteSnapshotResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new DeleteSnapshotResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -39600,29 +41971,21 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override 
public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -39633,12 +41996,10 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -39646,7 +42007,7 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -39674,46 +42035,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -39721,14 +42093,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.DeleteSnapshotResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.DeleteSnapshotResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteSnapshotResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteSnapshotResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -39741,27 +42114,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder 
clear() { super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteSnapshotResponse_descriptor; @@ -39785,6 +42151,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse)other); @@ -39796,7 +42188,8 @@ public final class MasterProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -39813,7 +42206,7 
@@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -39821,22 +42214,59 @@ public final class MasterProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.DeleteSnapshotResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.DeleteSnapshotResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse DEFAULT_INSTANCE; static { - defaultInstance = new DeleteSnapshotResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public DeleteSnapshotResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DeleteSnapshotResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public 
com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.DeleteSnapshotResponse) } - public interface RestoreSnapshotRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RestoreSnapshotRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RestoreSnapshotRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.SnapshotDescription snapshot = 1; /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ @@ -39850,7 +42280,6 @@ public final class MasterProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder(); - // optional uint64 nonce_group = 2 [default = 0]; /** * optional uint64 nonce_group = 2 [default = 0]; */ @@ -39860,7 +42289,6 @@ public final class MasterProtos { */ long getNonceGroup(); - // optional uint64 nonce = 3 [default = 0]; /** * optional uint64 nonce = 3 [default = 0]; */ @@ -39873,36 +42301,29 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.RestoreSnapshotRequest} */ - public static final class RestoreSnapshotRequest extends - com.google.protobuf.GeneratedMessage - implements RestoreSnapshotRequestOrBuilder { + public static final class RestoreSnapshotRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RestoreSnapshotRequest) + RestoreSnapshotRequestOrBuilder { // Use RestoreSnapshotRequest.newBuilder() to construct. 
- private RestoreSnapshotRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private RestoreSnapshotRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private RestoreSnapshotRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RestoreSnapshotRequest defaultInstance; - public static RestoreSnapshotRequest getDefaultInstance() { - return defaultInstance; - } - - public RestoreSnapshotRequest getDefaultInstanceForType() { - return defaultInstance; + private RestoreSnapshotRequest() { + nonceGroup_ = 0L; + nonce_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RestoreSnapshotRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -39950,7 +42371,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -39961,30 +42382,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RestoreSnapshotRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RestoreSnapshotRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RestoreSnapshotRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RestoreSnapshotRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.SnapshotDescription snapshot = 1; public static final int SNAPSHOT_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_; /** @@ -39997,16 +42402,15 @@ public final class MasterProtos { * required .hbase.pb.SnapshotDescription snapshot = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { - return snapshot_; + return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { - return snapshot_; + return snapshot_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } - // optional uint64 nonce_group = 2 [default = 0]; public static final int NONCE_GROUP_FIELD_NUMBER = 2; private long nonceGroup_; /** @@ -40022,7 +42426,6 @@ public final class MasterProtos { return nonceGroup_; } - // optional uint64 nonce = 3 [default = 0]; public static final int NONCE_FIELD_NUMBER = 3; private long nonce_; /** @@ -40038,15 +42441,11 @@ public final class MasterProtos { return nonce_; } - private void initFields() { - snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); - nonceGroup_ = 0L; - nonce_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasSnapshot()) { memoizedIsInitialized = 0; @@ -40062,9 +42461,8 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, snapshot_); + output.writeMessage(1, getSnapshot()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt64(2, nonceGroup_); @@ -40072,18 +42470,17 @@ public final class MasterProtos { if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeUInt64(3, nonce_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, snapshot_); + .computeMessageSize(1, getSnapshot()); } if (((bitField0_ 
& 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream @@ -40093,19 +42490,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(3, nonce_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -40131,12 +42522,10 @@ public final class MasterProtos { result = result && (getNonce() == other.getNonce()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -40150,13 +42539,15 @@ public final class MasterProtos { } if (hasNonceGroup()) { hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonceGroup()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonceGroup()); } if (hasNonce()) { hash = (37 * hash) + NONCE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonce()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonce()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -40184,46 +42575,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, 
input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder 
newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -40231,14 +42633,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.RestoreSnapshotRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RestoreSnapshotRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RestoreSnapshotRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RestoreSnapshotRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -40251,23 +42654,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + 
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getSnapshotFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (snapshotBuilder_ == null) { - snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = null; } else { snapshotBuilder_.clear(); } @@ -40279,10 +42679,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RestoreSnapshotRequest_descriptor; @@ -40325,6 +42721,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { 
if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest)other); @@ -40345,17 +42767,16 @@ public final class MasterProtos { if (other.hasNonce()) { setNonce(other.getNonce()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasSnapshot()) { - return false; } if (!getSnapshot().isInitialized()) { - return false; } return true; @@ -40370,7 +42791,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -40380,9 +42801,8 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.SnapshotDescription snapshot = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; /** * required .hbase.pb.SnapshotDescription snapshot = 1; @@ -40395,7 +42815,7 @@ public final class 
MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { if (snapshotBuilder_ == null) { - return snapshot_; + return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } else { return snapshotBuilder_.getMessage(); } @@ -40436,6 +42856,7 @@ public final class MasterProtos { public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + snapshot_ != null && snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) { snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); @@ -40454,7 +42875,7 @@ public final class MasterProtos { */ public Builder clearSnapshot() { if (snapshotBuilder_ == null) { - snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = null; onChanged(); } else { snapshotBuilder_.clear(); @@ -40477,19 +42898,20 @@ public final class MasterProtos { if (snapshotBuilder_ != null) { return snapshotBuilder_.getMessageOrBuilder(); } else { - return snapshot_; + return snapshot_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } } /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> getSnapshotFieldBuilder() { if (snapshotBuilder_ == null) { - snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilder< + snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>( - snapshot_, + getSnapshot(), getParentForChildren(), isClean()); snapshot_ = null; @@ -40497,7 +42919,6 @@ public final class MasterProtos { return snapshotBuilder_; } - // optional uint64 nonce_group = 2 [default = 0]; private long nonceGroup_ ; /** * optional uint64 nonce_group = 2 [default = 0]; @@ -40530,7 +42951,6 @@ public final class MasterProtos { return this; } - // optional uint64 nonce = 3 [default = 0]; private long nonce_ ; /** * optional uint64 nonce = 3 [default = 0]; @@ -40562,22 +42982,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RestoreSnapshotRequest) } + // 
@@protoc_insertion_point(class_scope:hbase.pb.RestoreSnapshotRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest DEFAULT_INSTANCE; static { - defaultInstance = new RestoreSnapshotRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RestoreSnapshotRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RestoreSnapshotRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.RestoreSnapshotRequest) } - public interface RestoreSnapshotResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RestoreSnapshotResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RestoreSnapshotResponse) + com.google.protobuf.MessageOrBuilder { - // required uint64 proc_id = 1; /** * required uint64 proc_id = 1; */ @@ -40590,36 +43047,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.RestoreSnapshotResponse} */ - public static final class RestoreSnapshotResponse extends - com.google.protobuf.GeneratedMessage - implements 
RestoreSnapshotResponseOrBuilder { + public static final class RestoreSnapshotResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RestoreSnapshotResponse) + RestoreSnapshotResponseOrBuilder { // Use RestoreSnapshotResponse.newBuilder() to construct. - private RestoreSnapshotResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private RestoreSnapshotResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private RestoreSnapshotResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RestoreSnapshotResponse defaultInstance; - public static RestoreSnapshotResponse getDefaultInstance() { - return defaultInstance; - } - - public RestoreSnapshotResponse getDefaultInstanceForType() { - return defaultInstance; + private RestoreSnapshotResponse() { + procId_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RestoreSnapshotResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -40649,7 +43098,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -40660,30 +43109,14 @@ public final class MasterProtos { 
return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RestoreSnapshotResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RestoreSnapshotResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RestoreSnapshotResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RestoreSnapshotResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required uint64 proc_id = 1; public static final int PROC_ID_FIELD_NUMBER = 1; private long procId_; /** @@ -40699,13 +43132,11 @@ public final class MasterProtos { return procId_; } - private void initFields() { - procId_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasProcId()) { memoizedIsInitialized = 0; @@ -40717,16 +43148,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, procId_); } 
- getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -40734,19 +43163,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(1, procId_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -40762,12 +43185,10 @@ public final class MasterProtos { result = result && (getProcId() == other.getProcId()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -40777,9 +43198,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasProcId()) { hash = (37 * hash) + PROC_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getProcId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getProcId()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -40807,46 +43229,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return 
newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -40854,14 +43287,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.RestoreSnapshotResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RestoreSnapshotResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RestoreSnapshotResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RestoreSnapshotResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -40874,18 +43308,15 @@ public final class MasterProtos { } private Builder( - 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); procId_ = 0L; @@ -40893,10 +43324,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RestoreSnapshotResponse_descriptor; @@ -40927,6 +43354,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse) { return 
mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse)other); @@ -40941,13 +43394,13 @@ public final class MasterProtos { if (other.hasProcId()) { setProcId(other.getProcId()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasProcId()) { - return false; } return true; @@ -40962,7 +43415,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -40972,7 +43425,6 @@ public final class MasterProtos { } private int bitField0_; - // required uint64 proc_id = 1; private long procId_ ; /** * required uint64 proc_id = 1; @@ -41004,22 +43456,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RestoreSnapshotResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.RestoreSnapshotResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse DEFAULT_INSTANCE; static { - defaultInstance = new RestoreSnapshotResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse(); + } + + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RestoreSnapshotResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RestoreSnapshotResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.RestoreSnapshotResponse) } - public interface IsSnapshotDoneRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface IsSnapshotDoneRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.IsSnapshotDoneRequest) + com.google.protobuf.MessageOrBuilder { - // optional .hbase.pb.SnapshotDescription snapshot = 1; /** * optional .hbase.pb.SnapshotDescription snapshot = 1; */ @@ -41034,43 +43523,34 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder(); } /** - * Protobuf type {@code hbase.pb.IsSnapshotDoneRequest} - * *
    * if you don't send the snapshot, then you will get it back
    * in the response (if the snapshot is done) so you can check the snapshot
    * 
+ * + * Protobuf type {@code hbase.pb.IsSnapshotDoneRequest} */ - public static final class IsSnapshotDoneRequest extends - com.google.protobuf.GeneratedMessage - implements IsSnapshotDoneRequestOrBuilder { + public static final class IsSnapshotDoneRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.IsSnapshotDoneRequest) + IsSnapshotDoneRequestOrBuilder { // Use IsSnapshotDoneRequest.newBuilder() to construct. - private IsSnapshotDoneRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private IsSnapshotDoneRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private IsSnapshotDoneRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final IsSnapshotDoneRequest defaultInstance; - public static IsSnapshotDoneRequest getDefaultInstance() { - return defaultInstance; + private IsSnapshotDoneRequest() { } - public IsSnapshotDoneRequest getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private IsSnapshotDoneRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -41108,7 +43588,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally 
{ this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -41119,30 +43599,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSnapshotDoneRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSnapshotDoneRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public IsSnapshotDoneRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new IsSnapshotDoneRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional .hbase.pb.SnapshotDescription snapshot = 1; public static final int SNAPSHOT_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_; /** @@ -41155,22 +43619,20 @@ public final class MasterProtos { * optional .hbase.pb.SnapshotDescription snapshot = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { - return snapshot_; + return snapshot_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } /** * optional .hbase.pb.SnapshotDescription snapshot = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { - return snapshot_; + return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } - private void initFields() { - snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (hasSnapshot()) { if (!getSnapshot().isInitialized()) { @@ -41184,36 +43646,28 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, snapshot_); + output.writeMessage(1, getSnapshot()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, snapshot_); + .computeMessageSize(1, getSnapshot()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - 
return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -41229,12 +43683,10 @@ public final class MasterProtos { result = result && getSnapshot() .equals(other.getSnapshot()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -41246,7 +43698,7 @@ public final class MasterProtos { hash = (37 * hash) + SNAPSHOT_FIELD_NUMBER; hash = (53 * hash) + getSnapshot().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -41274,66 +43726,78 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest 
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.IsSnapshotDoneRequest} - * *
      * if you don't send the snapshot, then you will get it back
      * in the response (if the snapshot is done) so you can check the snapshot
      * 
+ * + * Protobuf type {@code hbase.pb.IsSnapshotDoneRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.IsSnapshotDoneRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSnapshotDoneRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSnapshotDoneRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -41346,23 +43810,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getSnapshotFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (snapshotBuilder_ == null) { - snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = null; } else { snapshotBuilder_.clear(); } @@ -41370,10 +43831,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } 
- public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSnapshotDoneRequest_descriptor; @@ -41408,6 +43865,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest)other); @@ -41422,14 +43905,14 @@ public final class MasterProtos { if (other.hasSnapshot()) { mergeSnapshot(other.getSnapshot()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (hasSnapshot()) { if (!getSnapshot().isInitialized()) { - return false; } } @@ -41445,7 +43928,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = 
(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -41455,9 +43938,8 @@ public final class MasterProtos { } private int bitField0_; - // optional .hbase.pb.SnapshotDescription snapshot = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; /** * optional .hbase.pb.SnapshotDescription snapshot = 1; @@ -41470,7 +43952,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { if (snapshotBuilder_ == null) { - return snapshot_; + return snapshot_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } else { return snapshotBuilder_.getMessage(); } @@ -41511,6 +43993,7 @@ public final class MasterProtos { public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + snapshot_ != null && snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) { snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); @@ -41529,7 +44012,7 @@ public final class MasterProtos { */ public Builder clearSnapshot() { if (snapshotBuilder_ == null) { - snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = null; onChanged(); } else { snapshotBuilder_.clear(); @@ -41552,41 +44035,79 @@ public final class MasterProtos { if (snapshotBuilder_ != null) { return snapshotBuilder_.getMessageOrBuilder(); } else { - return snapshot_; + return snapshot_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } } /** * optional .hbase.pb.SnapshotDescription snapshot = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> getSnapshotFieldBuilder() { if (snapshotBuilder_ == null) { - snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilder< + snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>( - snapshot_, + getSnapshot(), getParentForChildren(), isClean()); snapshot_ = null; } return snapshotBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.IsSnapshotDoneRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.IsSnapshotDoneRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest DEFAULT_INSTANCE; static { - defaultInstance = new IsSnapshotDoneRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest(); + } + + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public IsSnapshotDoneRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new IsSnapshotDoneRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.IsSnapshotDoneRequest) } - public interface IsSnapshotDoneResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface IsSnapshotDoneResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.IsSnapshotDoneResponse) + com.google.protobuf.MessageOrBuilder { - // optional bool done = 1 [default = false]; /** * optional bool done = 1 [default = false]; */ @@ -41596,7 +44117,6 @@ public final class MasterProtos { */ boolean getDone(); - // optional .hbase.pb.SnapshotDescription snapshot = 2; /** * optional .hbase.pb.SnapshotDescription snapshot = 2; */ @@ -41613,36 +44133,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.IsSnapshotDoneResponse} */ - public static final class IsSnapshotDoneResponse extends - com.google.protobuf.GeneratedMessage - implements IsSnapshotDoneResponseOrBuilder { + public static final class IsSnapshotDoneResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // 
@@protoc_insertion_point(message_implements:hbase.pb.IsSnapshotDoneResponse) + IsSnapshotDoneResponseOrBuilder { // Use IsSnapshotDoneResponse.newBuilder() to construct. - private IsSnapshotDoneResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private IsSnapshotDoneResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private IsSnapshotDoneResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final IsSnapshotDoneResponse defaultInstance; - public static IsSnapshotDoneResponse getDefaultInstance() { - return defaultInstance; } - - public IsSnapshotDoneResponse getDefaultInstanceForType() { - return defaultInstance; + private IsSnapshotDoneResponse() { + done_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private IsSnapshotDoneResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -41685,7 +44197,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -41696,30 +44208,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSnapshotDoneResponse_descriptor; } - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSnapshotDoneResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public IsSnapshotDoneResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new IsSnapshotDoneResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional bool done = 1 [default = false]; public static final int DONE_FIELD_NUMBER = 1; private boolean done_; /** @@ -41735,7 +44231,6 @@ public final class MasterProtos { return done_; } - // optional .hbase.pb.SnapshotDescription snapshot = 2; public static final int SNAPSHOT_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_; /** @@ -41748,23 +44243,20 @@ public final class MasterProtos { * optional .hbase.pb.SnapshotDescription snapshot = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { - return snapshot_; + return snapshot_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } /** * optional .hbase.pb.SnapshotDescription snapshot = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { - return snapshot_; + return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } - private void initFields() { - done_ = false; - snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (hasSnapshot()) { if (!getSnapshot().isInitialized()) { @@ -41778,19 +44270,17 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, done_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, snapshot_); + output.writeMessage(2, getSnapshot()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -41800,21 +44290,15 @@ public final class MasterProtos { } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, snapshot_); + .computeMessageSize(2, getSnapshot()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } 
private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -41835,12 +44319,10 @@ public final class MasterProtos { result = result && getSnapshot() .equals(other.getSnapshot()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -41850,13 +44332,14 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasDone()) { hash = (37 * hash) + DONE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getDone()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getDone()); } if (hasSnapshot()) { hash = (37 * hash) + SNAPSHOT_FIELD_NUMBER; hash = (53 * hash) + getSnapshot().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -41884,46 +44367,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public 
static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -41931,14 +44425,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.IsSnapshotDoneResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.IsSnapshotDoneResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSnapshotDoneResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSnapshotDoneResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -41951,25 +44446,22 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getSnapshotFieldBuilder(); } } - private static Builder create() { - return new 
Builder(); - } - public Builder clear() { super.clear(); done_ = false; bitField0_ = (bitField0_ & ~0x00000001); if (snapshotBuilder_ == null) { - snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = null; } else { snapshotBuilder_.clear(); } @@ -41977,10 +44469,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSnapshotDoneResponse_descriptor; @@ -42019,6 +44507,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse)other); @@ -42036,14 +44550,14 @@ public final class MasterProtos { if (other.hasSnapshot()) { 
mergeSnapshot(other.getSnapshot()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (hasSnapshot()) { if (!getSnapshot().isInitialized()) { - return false; } } @@ -42059,7 +44573,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -42069,7 +44583,6 @@ public final class MasterProtos { } private int bitField0_; - // optional bool done = 1 [default = false]; private boolean done_ ; /** * optional bool done = 1 [default = false]; @@ -42102,9 +44615,8 @@ public final class MasterProtos { return this; } - // optional .hbase.pb.SnapshotDescription snapshot = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; /** * optional .hbase.pb.SnapshotDescription snapshot = 2; @@ -42117,7 +44629,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { if (snapshotBuilder_ 
== null) { - return snapshot_; + return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } else { return snapshotBuilder_.getMessage(); } @@ -42158,6 +44670,7 @@ public final class MasterProtos { public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + snapshot_ != null && snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) { snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); @@ -42176,7 +44689,7 @@ public final class MasterProtos { */ public Builder clearSnapshot() { if (snapshotBuilder_ == null) { - snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = null; onChanged(); } else { snapshotBuilder_.clear(); @@ -42199,41 +44712,79 @@ public final class MasterProtos { if (snapshotBuilder_ != null) { return snapshotBuilder_.getMessageOrBuilder(); } else { - return snapshot_; + return snapshot_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } } /** * optional .hbase.pb.SnapshotDescription snapshot = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> getSnapshotFieldBuilder() { if (snapshotBuilder_ == null) { - snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilder< + snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>( - snapshot_, + getSnapshot(), getParentForChildren(), isClean()); snapshot_ = null; } return snapshotBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.IsSnapshotDoneResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.IsSnapshotDoneResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse DEFAULT_INSTANCE; static { - defaultInstance = new IsSnapshotDoneResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse(); + } + + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public IsSnapshotDoneResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new IsSnapshotDoneResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.IsSnapshotDoneResponse) } - public interface IsRestoreSnapshotDoneRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface IsRestoreSnapshotDoneRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.IsRestoreSnapshotDoneRequest) + com.google.protobuf.MessageOrBuilder { - // optional .hbase.pb.SnapshotDescription snapshot = 1; /** * optional .hbase.pb.SnapshotDescription snapshot = 1; */ @@ -42250,36 +44801,27 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.IsRestoreSnapshotDoneRequest} */ - public static final class IsRestoreSnapshotDoneRequest extends - com.google.protobuf.GeneratedMessage - implements IsRestoreSnapshotDoneRequestOrBuilder { + public static final class IsRestoreSnapshotDoneRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.IsRestoreSnapshotDoneRequest) + IsRestoreSnapshotDoneRequestOrBuilder { // Use IsRestoreSnapshotDoneRequest.newBuilder() to construct. 
- private IsRestoreSnapshotDoneRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private IsRestoreSnapshotDoneRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private IsRestoreSnapshotDoneRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final IsRestoreSnapshotDoneRequest defaultInstance; - public static IsRestoreSnapshotDoneRequest getDefaultInstance() { - return defaultInstance; - } - - public IsRestoreSnapshotDoneRequest getDefaultInstanceForType() { - return defaultInstance; + private IsRestoreSnapshotDoneRequest() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private IsRestoreSnapshotDoneRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -42317,7 +44859,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -42328,30 +44870,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsRestoreSnapshotDoneRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsRestoreSnapshotDoneRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public IsRestoreSnapshotDoneRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new IsRestoreSnapshotDoneRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional .hbase.pb.SnapshotDescription snapshot = 1; public static final int SNAPSHOT_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_; /** @@ -42364,22 +44890,20 @@ public final class MasterProtos { * optional .hbase.pb.SnapshotDescription snapshot = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { - return snapshot_; + return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } /** * optional .hbase.pb.SnapshotDescription snapshot = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { - return snapshot_; + return snapshot_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } - private void initFields() { - snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (hasSnapshot()) { if (!getSnapshot().isInitialized()) { @@ -42393,36 +44917,28 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, snapshot_); + output.writeMessage(1, getSnapshot()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, snapshot_); + .computeMessageSize(1, getSnapshot()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -42438,12 +44954,10 @@ public final class MasterProtos { result = result && getSnapshot() .equals(other.getSnapshot()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = 
result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -42455,7 +44969,7 @@ public final class MasterProtos { hash = (37 * hash) + SNAPSHOT_FIELD_NUMBER; hash = (53 * hash) + getSnapshot().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -42483,46 +44997,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -42530,14 +45055,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.IsRestoreSnapshotDoneRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.IsRestoreSnapshotDoneRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsRestoreSnapshotDoneRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsRestoreSnapshotDoneRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -42550,23 +45076,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getSnapshotFieldBuilder(); } } - private static 
Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (snapshotBuilder_ == null) { - snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = null; } else { snapshotBuilder_.clear(); } @@ -42574,10 +45097,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsRestoreSnapshotDoneRequest_descriptor; @@ -42612,6 +45131,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest)other); @@ -42626,14 +45171,14 @@ public final class MasterProtos { if (other.hasSnapshot()) { 
mergeSnapshot(other.getSnapshot()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (hasSnapshot()) { if (!getSnapshot().isInitialized()) { - return false; } } @@ -42649,7 +45194,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -42659,9 +45204,8 @@ public final class MasterProtos { } private int bitField0_; - // optional .hbase.pb.SnapshotDescription snapshot = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; /** * optional .hbase.pb.SnapshotDescription snapshot = 1; @@ -42674,7 +45218,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { if (snapshotBuilder_ == null) { - return snapshot_; + return snapshot_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } else { return snapshotBuilder_.getMessage(); } @@ -42715,6 +45259,7 @@ public final class MasterProtos { public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + snapshot_ != null && snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) { snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); @@ -42733,7 +45278,7 @@ public final class MasterProtos { */ public Builder clearSnapshot() { if (snapshotBuilder_ == null) { - snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = null; onChanged(); } else { snapshotBuilder_.clear(); @@ -42756,41 +45301,79 @@ public final class MasterProtos { if (snapshotBuilder_ != null) { return snapshotBuilder_.getMessageOrBuilder(); } else { - return snapshot_; + return snapshot_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; } } /** * optional .hbase.pb.SnapshotDescription snapshot = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> getSnapshotFieldBuilder() { if (snapshotBuilder_ == null) { - snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilder< + snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>( - snapshot_, + getSnapshot(), getParentForChildren(), isClean()); snapshot_ = null; } return snapshotBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.IsRestoreSnapshotDoneRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.IsRestoreSnapshotDoneRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest DEFAULT_INSTANCE; static { - defaultInstance = new IsRestoreSnapshotDoneRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest(); + } + + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public IsRestoreSnapshotDoneRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new IsRestoreSnapshotDoneRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.IsRestoreSnapshotDoneRequest) } - public interface IsRestoreSnapshotDoneResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface IsRestoreSnapshotDoneResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.IsRestoreSnapshotDoneResponse) + com.google.protobuf.MessageOrBuilder { - // optional bool done = 1 [default = false]; /** * optional bool done = 1 [default = false]; */ @@ -42803,36 +45386,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.IsRestoreSnapshotDoneResponse} */ - public static final class IsRestoreSnapshotDoneResponse extends - com.google.protobuf.GeneratedMessage - implements IsRestoreSnapshotDoneResponseOrBuilder { + public static final class IsRestoreSnapshotDoneResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.IsRestoreSnapshotDoneResponse) + IsRestoreSnapshotDoneResponseOrBuilder { // Use IsRestoreSnapshotDoneResponse.newBuilder() to 
construct. - private IsRestoreSnapshotDoneResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private IsRestoreSnapshotDoneResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private IsRestoreSnapshotDoneResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final IsRestoreSnapshotDoneResponse defaultInstance; - public static IsRestoreSnapshotDoneResponse getDefaultInstance() { - return defaultInstance; } - - public IsRestoreSnapshotDoneResponse getDefaultInstanceForType() { - return defaultInstance; + private IsRestoreSnapshotDoneResponse() { + done_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private IsRestoreSnapshotDoneResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -42862,7 +45437,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -42873,30 +45448,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsRestoreSnapshotDoneResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsRestoreSnapshotDoneResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public IsRestoreSnapshotDoneResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new IsRestoreSnapshotDoneResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional bool done = 1 [default = false]; public static final int DONE_FIELD_NUMBER = 1; private boolean done_; /** @@ -42912,13 +45471,11 @@ public final class MasterProtos { return done_; } - private void initFields() { - done_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -42926,16 +45483,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, done_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = 
memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -42943,19 +45498,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, done_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -42971,12 +45520,10 @@ public final class MasterProtos { result = result && (getDone() == other.getDone()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -42986,9 +45533,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasDone()) { hash = (37 * hash) + DONE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getDone()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getDone()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -43016,46 +45564,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parseFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder 
newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -43063,14 +45622,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.IsRestoreSnapshotDoneResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.IsRestoreSnapshotDoneResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsRestoreSnapshotDoneResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsRestoreSnapshotDoneResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -43083,18 +45643,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent 
parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); done_ = false; @@ -43102,10 +45659,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsRestoreSnapshotDoneResponse_descriptor; @@ -43136,6 +45689,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse)other); @@ -43150,7 +45729,8 @@ public 
final class MasterProtos { if (other.hasDone()) { setDone(other.getDone()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -43167,7 +45747,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -43177,7 +45757,6 @@ public final class MasterProtos { } private int bitField0_; - // optional bool done = 1 [default = false]; private boolean done_ ; /** * optional bool done = 1 [default = false]; @@ -43209,22 +45788,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.IsRestoreSnapshotDoneResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.IsRestoreSnapshotDoneResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse DEFAULT_INSTANCE; static { - defaultInstance = new IsRestoreSnapshotDoneResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final 
com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public IsRestoreSnapshotDoneResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new IsRestoreSnapshotDoneResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.IsRestoreSnapshotDoneResponse) } - public interface GetSchemaAlterStatusRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface GetSchemaAlterStatusRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GetSchemaAlterStatusRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.TableName table_name = 1; /** * required .hbase.pb.TableName table_name = 1; */ @@ -43241,36 +45857,27 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.GetSchemaAlterStatusRequest} */ - public static final class GetSchemaAlterStatusRequest extends - com.google.protobuf.GeneratedMessage - implements GetSchemaAlterStatusRequestOrBuilder { + public static final class GetSchemaAlterStatusRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetSchemaAlterStatusRequest) + GetSchemaAlterStatusRequestOrBuilder { // Use GetSchemaAlterStatusRequest.newBuilder() to construct. 
- private GetSchemaAlterStatusRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetSchemaAlterStatusRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private GetSchemaAlterStatusRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final GetSchemaAlterStatusRequest defaultInstance; - public static GetSchemaAlterStatusRequest getDefaultInstance() { - return defaultInstance; - } - - public GetSchemaAlterStatusRequest getDefaultInstanceForType() { - return defaultInstance; + private GetSchemaAlterStatusRequest() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private GetSchemaAlterStatusRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -43308,7 +45915,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -43319,30 +45926,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetSchemaAlterStatusRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { 
return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetSchemaAlterStatusRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetSchemaAlterStatusRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetSchemaAlterStatusRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.TableName table_name = 1; public static final int TABLE_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** @@ -43355,22 +45946,20 @@ public final class MasterProtos { * required .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * required .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { - return tableName_; + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } - private void initFields() { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasTableName()) { memoizedIsInitialized = 0; @@ -43386,36 +45975,28 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, tableName_); + output.writeMessage(1, getTableName()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, tableName_); + .computeMessageSize(1, getTableName()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -43431,12 +46012,10 @@ public final class MasterProtos { result = result && getTableName() .equals(other.getTableName()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && 
unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -43448,7 +46027,7 @@ public final class MasterProtos { hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -43476,46 +46055,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -43523,14 +46113,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.GetSchemaAlterStatusRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetSchemaAlterStatusRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetSchemaAlterStatusRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetSchemaAlterStatusRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -43543,23 +46134,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTableNameFieldBuilder(); } } - private static 
Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; } else { tableNameBuilder_.clear(); } @@ -43567,10 +46155,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetSchemaAlterStatusRequest_descriptor; @@ -43605,6 +46189,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest)other); @@ -43619,17 +46229,16 @@ public final class MasterProtos { if (other.hasTableName()) { mergeTableName(other.getTableName()); 
} - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasTableName()) { - return false; } if (!getTableName().isInitialized()) { - return false; } return true; @@ -43644,7 +46253,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -43654,9 +46263,8 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.TableName table_name = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * required .hbase.pb.TableName table_name = 1; @@ -43669,7 +46277,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { - return tableName_; + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } @@ -43710,6 +46318,7 @@ public final class MasterProtos { public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); @@ -43728,7 +46337,7 @@ public final class MasterProtos { */ public Builder clearTableName() { if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); @@ -43751,41 +46360,79 @@ public final class MasterProtos { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { - return tableName_; + return tableName_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * required .hbase.pb.TableName table_name = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { - tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( - tableName_, + getTableName(), getParentForChildren(), isClean()); tableName_ = null; } return tableNameBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.GetSchemaAlterStatusRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.GetSchemaAlterStatusRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest DEFAULT_INSTANCE; static { - defaultInstance = new GetSchemaAlterStatusRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest 
getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GetSchemaAlterStatusRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetSchemaAlterStatusRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.GetSchemaAlterStatusRequest) } - public interface GetSchemaAlterStatusResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface GetSchemaAlterStatusResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GetSchemaAlterStatusResponse) + com.google.protobuf.MessageOrBuilder { - // optional uint32 yet_to_update_regions = 1; /** * optional uint32 yet_to_update_regions = 1; */ @@ -43795,7 +46442,6 @@ public final class MasterProtos { */ int getYetToUpdateRegions(); - // optional uint32 total_regions = 2; /** * optional uint32 total_regions = 2; */ @@ -43808,36 +46454,29 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.GetSchemaAlterStatusResponse} */ - public static final class GetSchemaAlterStatusResponse extends - com.google.protobuf.GeneratedMessage - implements GetSchemaAlterStatusResponseOrBuilder { + public static final class GetSchemaAlterStatusResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetSchemaAlterStatusResponse) + 
GetSchemaAlterStatusResponseOrBuilder { // Use GetSchemaAlterStatusResponse.newBuilder() to construct. - private GetSchemaAlterStatusResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetSchemaAlterStatusResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private GetSchemaAlterStatusResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final GetSchemaAlterStatusResponse defaultInstance; - public static GetSchemaAlterStatusResponse getDefaultInstance() { - return defaultInstance; } - - public GetSchemaAlterStatusResponse getDefaultInstanceForType() { - return defaultInstance; + private GetSchemaAlterStatusResponse() { + yetToUpdateRegions_ = 0; + totalRegions_ = 0; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private GetSchemaAlterStatusResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -43872,7 +46511,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -43883,30 +46522,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetSchemaAlterStatusResponse_descriptor; } - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetSchemaAlterStatusResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetSchemaAlterStatusResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetSchemaAlterStatusResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional uint32 yet_to_update_regions = 1; public static final int YET_TO_UPDATE_REGIONS_FIELD_NUMBER = 1; private int yetToUpdateRegions_; /** @@ -43922,7 +46545,6 @@ public final class MasterProtos { return yetToUpdateRegions_; } - // optional uint32 total_regions = 2; public static final int TOTAL_REGIONS_FIELD_NUMBER = 2; private int totalRegions_; /** @@ -43938,14 +46560,11 @@ public final class MasterProtos { return totalRegions_; } - private void initFields() { - yetToUpdateRegions_ = 0; - totalRegions_ = 0; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -43953,19 +46572,17 @@ public final class MasterProtos { public void 
writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt32(1, yetToUpdateRegions_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt32(2, totalRegions_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -43977,19 +46594,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt32Size(2, totalRegions_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -44010,12 +46621,10 @@ public final class MasterProtos { result = result && (getTotalRegions() == other.getTotalRegions()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -44031,7 +46640,7 @@ public final class MasterProtos { hash = (37 * hash) + TOTAL_REGIONS_FIELD_NUMBER; hash = (53 * hash) + getTotalRegions(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -44059,46 +46668,57 @@ public final class MasterProtos { } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return 
PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -44106,14 +46726,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.GetSchemaAlterStatusResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetSchemaAlterStatusResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetSchemaAlterStatusResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { 
return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetSchemaAlterStatusResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -44126,18 +46747,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); yetToUpdateRegions_ = 0; @@ -44147,10 +46765,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetSchemaAlterStatusResponse_descriptor; @@ -44185,6 +46799,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + 
return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse)other); @@ -44202,7 +46842,8 @@ public final class MasterProtos { if (other.hasTotalRegions()) { setTotalRegions(other.getTotalRegions()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -44219,7 +46860,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -44229,7 +46870,6 @@ public final class MasterProtos { } private int bitField0_; - // optional uint32 yet_to_update_regions = 1; private int yetToUpdateRegions_ ; /** * optional uint32 yet_to_update_regions = 1; @@ -44262,7 +46902,6 @@ public final class MasterProtos { return this; } - // optional uint32 total_regions = 2; private int totalRegions_ ; /** * optional uint32 total_regions = 2; @@ -44294,22 +46933,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.GetSchemaAlterStatusResponse) } + // 
@@protoc_insertion_point(class_scope:hbase.pb.GetSchemaAlterStatusResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse DEFAULT_INSTANCE; static { - defaultInstance = new GetSchemaAlterStatusResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GetSchemaAlterStatusResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetSchemaAlterStatusResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.GetSchemaAlterStatusResponse) } - public interface GetTableDescriptorsRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface GetTableDescriptorsRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GetTableDescriptorsRequest) + com.google.protobuf.MessageOrBuilder { - // repeated .hbase.pb.TableName table_names = 1; /** * repeated .hbase.pb.TableName table_names = 1; */ @@ -44334,7 +47010,6 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder 
getTableNamesOrBuilder( int index); - // optional string regex = 2; /** * optional string regex = 2; */ @@ -44349,7 +47024,6 @@ public final class MasterProtos { com.google.protobuf.ByteString getRegexBytes(); - // optional bool include_sys_tables = 3 [default = false]; /** * optional bool include_sys_tables = 3 [default = false]; */ @@ -44359,7 +47033,6 @@ public final class MasterProtos { */ boolean getIncludeSysTables(); - // optional string namespace = 4; /** * optional string namespace = 4; */ @@ -44377,36 +47050,31 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.GetTableDescriptorsRequest} */ - public static final class GetTableDescriptorsRequest extends - com.google.protobuf.GeneratedMessage - implements GetTableDescriptorsRequestOrBuilder { + public static final class GetTableDescriptorsRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetTableDescriptorsRequest) + GetTableDescriptorsRequestOrBuilder { // Use GetTableDescriptorsRequest.newBuilder() to construct. 
- private GetTableDescriptorsRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetTableDescriptorsRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private GetTableDescriptorsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final GetTableDescriptorsRequest defaultInstance; - public static GetTableDescriptorsRequest getDefaultInstance() { - return defaultInstance; } - - public GetTableDescriptorsRequest getDefaultInstanceForType() { - return defaultInstance; + private GetTableDescriptorsRequest() { + tableNames_ = java.util.Collections.emptyList(); + regex_ = ""; + includeSysTables_ = false; + namespace_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private GetTableDescriptorsRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -44430,12 +47098,14 @@ public final class MasterProtos { tableNames_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } - tableNames_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry)); + tableNames_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry)); break; } case 18: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - regex_ = input.readBytes(); + regex_ = bs; break; } case 24: { @@ -44444,8 +47114,9 @@ public 
final class MasterProtos { break; } case 34: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000004; - namespace_ = input.readBytes(); + namespace_ = bs; break; } } @@ -44454,7 +47125,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { tableNames_ = java.util.Collections.unmodifiableList(tableNames_); @@ -44468,30 +47139,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableDescriptorsRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableDescriptorsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetTableDescriptorsRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetTableDescriptorsRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // repeated .hbase.pb.TableName table_names = 1; public static final int TABLE_NAMES_FIELD_NUMBER = 1; private 
java.util.List tableNames_; /** @@ -44527,9 +47182,8 @@ public final class MasterProtos { return tableNames_.get(index); } - // optional string regex = 2; public static final int REGEX_FIELD_NUMBER = 2; - private java.lang.Object regex_; + private volatile java.lang.Object regex_; /** * optional string regex = 2; */ @@ -44570,7 +47224,6 @@ public final class MasterProtos { } } - // optional bool include_sys_tables = 3 [default = false]; public static final int INCLUDE_SYS_TABLES_FIELD_NUMBER = 3; private boolean includeSysTables_; /** @@ -44586,9 +47239,8 @@ public final class MasterProtos { return includeSysTables_; } - // optional string namespace = 4; public static final int NAMESPACE_FIELD_NUMBER = 4; - private java.lang.Object namespace_; + private volatile java.lang.Object namespace_; /** * optional string namespace = 4; */ @@ -44629,16 +47281,11 @@ public final class MasterProtos { } } - private void initFields() { - tableNames_ = java.util.Collections.emptyList(); - regex_ = ""; - includeSysTables_ = false; - namespace_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; for (int i = 0; i < getTableNamesCount(); i++) { if (!getTableNames(i).isInitialized()) { @@ -44652,25 +47299,23 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < tableNames_.size(); i++) { output.writeMessage(1, tableNames_.get(i)); } if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(2, getRegexBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, regex_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBool(3, includeSysTables_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(4, 
getNamespaceBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, namespace_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -44679,30 +47324,22 @@ public final class MasterProtos { .computeMessageSize(1, tableNames_.get(i)); } if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getRegexBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, regex_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(3, includeSysTables_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(4, getNamespaceBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, namespace_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -44730,12 +47367,10 @@ public final class MasterProtos { result = result && getNamespace() .equals(other.getNamespace()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -44753,13 +47388,14 @@ public final class MasterProtos { } if (hasIncludeSysTables()) { hash = (37 
* hash) + INCLUDE_SYS_TABLES_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getIncludeSysTables()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getIncludeSysTables()); } if (hasNamespace()) { hash = (37 * hash) + NAMESPACE_FIELD_NUMBER; hash = (53 * hash) + getNamespace().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -44787,46 +47423,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -44834,14 +47481,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.GetTableDescriptorsRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetTableDescriptorsRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableDescriptorsRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableDescriptorsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -44854,19 +47502,16 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTableNamesFieldBuilder(); } } - private static Builder 
create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (tableNamesBuilder_ == null) { @@ -44884,10 +47529,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableDescriptorsRequest_descriptor; @@ -44935,6 +47576,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest)other); @@ -44965,7 +47632,7 @@ public final class MasterProtos { tableNames_ = other.tableNames_; bitField0_ = (bitField0_ & ~0x00000001); tableNamesBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getTableNamesFieldBuilder() : null; } else { tableNamesBuilder_.addAllMessages(other.tableNames_); @@ -44985,14 +47652,14 @@ public final class MasterProtos { namespace_ = other.namespace_; onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getTableNamesCount(); i++) { if (!getTableNames(i).isInitialized()) { - return false; } } @@ -45008,7 +47675,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -45018,7 +47685,6 @@ public final class MasterProtos { } private int bitField0_; - // repeated .hbase.pb.TableName table_names = 1; private java.util.List tableNames_ = java.util.Collections.emptyList(); private void ensureTableNamesIsMutable() { @@ -45028,7 +47694,7 @@ public final class MasterProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNamesBuilder_; /** @@ -45160,7 +47826,8 @@ public final class MasterProtos { java.lang.Iterable values) { if (tableNamesBuilder_ == null) { ensureTableNamesIsMutable(); - super.addAll(values, tableNames_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, tableNames_); onChanged(); } else { tableNamesBuilder_.addAllMessages(values); @@ -45243,11 +47910,11 @@ public final class MasterProtos { 
getTableNamesBuilderList() { return getTableNamesFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNamesFieldBuilder() { if (tableNamesBuilder_ == null) { - tableNamesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + tableNamesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( tableNames_, ((bitField0_ & 0x00000001) == 0x00000001), @@ -45258,7 +47925,6 @@ public final class MasterProtos { return tableNamesBuilder_; } - // optional string regex = 2; private java.lang.Object regex_ = ""; /** * optional string regex = 2; @@ -45272,9 +47938,12 @@ public final class MasterProtos { public java.lang.String getRegex() { java.lang.Object ref = regex_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - regex_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + regex_ = s; + } return s; } else { return (java.lang.String) ref; @@ -45332,7 +48001,6 @@ public final class MasterProtos { return this; } - // optional bool include_sys_tables = 3 [default = false]; private boolean includeSysTables_ ; /** * optional bool include_sys_tables = 3 [default = false]; @@ -45365,7 +48033,6 @@ public final class MasterProtos { return this; } - // optional string namespace = 4; private java.lang.Object namespace_ = ""; /** * optional string 
namespace = 4; @@ -45379,9 +48046,12 @@ public final class MasterProtos { public java.lang.String getNamespace() { java.lang.Object ref = namespace_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - namespace_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + namespace_ = s; + } return s; } else { return (java.lang.String) ref; @@ -45438,22 +48108,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.GetTableDescriptorsRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.GetTableDescriptorsRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest DEFAULT_INSTANCE; static { - defaultInstance = new GetTableDescriptorsRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GetTableDescriptorsRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetTableDescriptorsRequest(input, 
extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.GetTableDescriptorsRequest) } - public interface GetTableDescriptorsResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface GetTableDescriptorsResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GetTableDescriptorsResponse) + com.google.protobuf.MessageOrBuilder { - // repeated .hbase.pb.TableSchema table_schema = 1; /** * repeated .hbase.pb.TableSchema table_schema = 1; */ @@ -45481,36 +48188,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.GetTableDescriptorsResponse} */ - public static final class GetTableDescriptorsResponse extends - com.google.protobuf.GeneratedMessage - implements GetTableDescriptorsResponseOrBuilder { + public static final class GetTableDescriptorsResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetTableDescriptorsResponse) + GetTableDescriptorsResponseOrBuilder { // Use GetTableDescriptorsResponse.newBuilder() to construct. 
- private GetTableDescriptorsResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetTableDescriptorsResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private GetTableDescriptorsResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final GetTableDescriptorsResponse defaultInstance; - public static GetTableDescriptorsResponse getDefaultInstance() { - return defaultInstance; } - - public GetTableDescriptorsResponse getDefaultInstanceForType() { - return defaultInstance; + private GetTableDescriptorsResponse() { + tableSchema_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private GetTableDescriptorsResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -45534,7 +48233,8 @@ public final class MasterProtos { tableSchema_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } - tableSchema_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.PARSER, extensionRegistry)); + tableSchema_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.PARSER, extensionRegistry)); break; } } @@ -45543,7 +48243,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + 
e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { tableSchema_ = java.util.Collections.unmodifiableList(tableSchema_); @@ -45557,29 +48257,13 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableDescriptorsResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableDescriptorsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetTableDescriptorsResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetTableDescriptorsResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - // repeated .hbase.pb.TableSchema table_schema = 1; public static final int TABLE_SCHEMA_FIELD_NUMBER = 1; private java.util.List tableSchema_; /** @@ -45615,13 +48299,11 @@ public final class MasterProtos { return tableSchema_.get(index); } - private void initFields() { - tableSchema_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) 
return false; for (int i = 0; i < getTableSchemaCount(); i++) { if (!getTableSchema(i).isInitialized()) { @@ -45635,16 +48317,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < tableSchema_.size(); i++) { output.writeMessage(1, tableSchema_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -45652,19 +48332,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, tableSchema_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -45677,12 +48351,10 @@ public final class MasterProtos { boolean result = true; result = result && getTableSchemaList() .equals(other.getTableSchemaList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -45694,7 +48366,7 @@ public final class MasterProtos { hash = (37 * hash) + TABLE_SCHEMA_FIELD_NUMBER; hash = (53 * hash) + getTableSchemaList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -45722,46 
+48394,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -45769,14 +48452,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.GetTableDescriptorsResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetTableDescriptorsResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableDescriptorsResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableDescriptorsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -45789,19 +48473,16 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTableSchemaFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (tableSchemaBuilder_ == null) { @@ -45813,10 +48494,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableDescriptorsResponse_descriptor; @@ -45850,6 +48527,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, 
index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse)other); @@ -45880,21 +48583,21 @@ public final class MasterProtos { tableSchema_ = other.tableSchema_; bitField0_ = (bitField0_ & ~0x00000001); tableSchemaBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getTableSchemaFieldBuilder() : null; } else { tableSchemaBuilder_.addAllMessages(other.tableSchema_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getTableSchemaCount(); i++) { if (!getTableSchema(i).isInitialized()) { - return false; } } @@ -45910,7 +48613,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -45920,7 +48623,6 @@ public final class MasterProtos { } private int bitField0_; - // repeated .hbase.pb.TableSchema table_schema = 1; private java.util.List tableSchema_ = java.util.Collections.emptyList(); private void ensureTableSchemaIsMutable() { @@ -45930,7 +48632,7 @@ public final class MasterProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private 
com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_; /** @@ -46062,7 +48764,8 @@ public final class MasterProtos { java.lang.Iterable values) { if (tableSchemaBuilder_ == null) { ensureTableSchemaIsMutable(); - super.addAll(values, tableSchema_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, tableSchema_); onChanged(); } else { tableSchemaBuilder_.addAllMessages(values); @@ -46145,11 +48848,11 @@ public final class MasterProtos { getTableSchemaBuilderList() { return getTableSchemaFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> getTableSchemaFieldBuilder() { if (tableSchemaBuilder_ == null) { - tableSchemaBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + tableSchemaBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>( tableSchema_, ((bitField0_ & 0x00000001) == 0x00000001), @@ -46159,22 +48862,59 @@ public final class MasterProtos { } return tableSchemaBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet 
unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.GetTableDescriptorsResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.GetTableDescriptorsResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse DEFAULT_INSTANCE; static { - defaultInstance = new GetTableDescriptorsResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GetTableDescriptorsResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetTableDescriptorsResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.GetTableDescriptorsResponse) } - public interface GetTableNamesRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface GetTableNamesRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GetTableNamesRequest) + com.google.protobuf.MessageOrBuilder { - // optional string regex = 1; /** * optional string regex = 1; */ @@ -46189,7 +48929,6 @@ public final class 
MasterProtos { com.google.protobuf.ByteString getRegexBytes(); - // optional bool include_sys_tables = 2 [default = false]; /** * optional bool include_sys_tables = 2 [default = false]; */ @@ -46199,7 +48938,6 @@ public final class MasterProtos { */ boolean getIncludeSysTables(); - // optional string namespace = 3; /** * optional string namespace = 3; */ @@ -46217,36 +48955,30 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.GetTableNamesRequest} */ - public static final class GetTableNamesRequest extends - com.google.protobuf.GeneratedMessage - implements GetTableNamesRequestOrBuilder { + public static final class GetTableNamesRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetTableNamesRequest) + GetTableNamesRequestOrBuilder { // Use GetTableNamesRequest.newBuilder() to construct. - private GetTableNamesRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetTableNamesRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private GetTableNamesRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final GetTableNamesRequest defaultInstance; - public static GetTableNamesRequest getDefaultInstance() { - return defaultInstance; } - - public GetTableNamesRequest getDefaultInstanceForType() { - return defaultInstance; + private GetTableNamesRequest() { + regex_ = ""; + includeSysTables_ = false; + namespace_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private GetTableNamesRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -46266,8 +48998,9 @@ public final class MasterProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - regex_ = input.readBytes(); + regex_ = bs; break; } case 16: { @@ -46276,8 +49009,9 @@ public final class MasterProtos { break; } case 26: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000004; - namespace_ = input.readBytes(); + namespace_ = bs; break; } } @@ -46286,7 +49020,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -46297,32 +49031,16 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableNamesRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableNamesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetTableNamesRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - 
throws com.google.protobuf.InvalidProtocolBufferException { - return new GetTableNamesRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional string regex = 1; public static final int REGEX_FIELD_NUMBER = 1; - private java.lang.Object regex_; + private volatile java.lang.Object regex_; /** * optional string regex = 1; */ @@ -46363,7 +49081,6 @@ public final class MasterProtos { } } - // optional bool include_sys_tables = 2 [default = false]; public static final int INCLUDE_SYS_TABLES_FIELD_NUMBER = 2; private boolean includeSysTables_; /** @@ -46379,9 +49096,8 @@ public final class MasterProtos { return includeSysTables_; } - // optional string namespace = 3; public static final int NAMESPACE_FIELD_NUMBER = 3; - private java.lang.Object namespace_; + private volatile java.lang.Object namespace_; /** * optional string namespace = 3; */ @@ -46422,15 +49138,11 @@ public final class MasterProtos { } } - private void initFields() { - regex_ = ""; - includeSysTables_ = false; - namespace_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -46438,50 +49150,40 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getRegexBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, regex_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBool(2, includeSysTables_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, getNamespaceBytes()); + 
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, namespace_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getRegexBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, regex_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(2, includeSysTables_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, getNamespaceBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, namespace_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -46507,12 +49209,10 @@ public final class MasterProtos { result = result && getNamespace() .equals(other.getNamespace()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -46526,13 +49226,14 @@ public final class MasterProtos { } if (hasIncludeSysTables()) { hash = (37 * hash) + INCLUDE_SYS_TABLES_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getIncludeSysTables()); + hash = (53 * hash) + 
com.google.protobuf.Internal.hashBoolean( + getIncludeSysTables()); } if (hasNamespace()) { hash = (37 * hash) + NAMESPACE_FIELD_NUMBER; hash = (53 * hash) + getNamespace().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -46560,46 +49261,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return 
PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -46607,14 +49319,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.GetTableNamesRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetTableNamesRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableNamesRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableNamesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -46627,18 +49340,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { 
super.clear(); regex_ = ""; @@ -46650,10 +49360,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableNamesRequest_descriptor; @@ -46692,6 +49398,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest)other); @@ -46716,7 +49448,8 @@ public final class MasterProtos { namespace_ = other.namespace_; onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -46733,7 +49466,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -46743,7 +49476,6 @@ public final class MasterProtos { } private int bitField0_; - // optional string regex = 1; private java.lang.Object regex_ = ""; /** * optional string regex = 1; @@ -46757,9 +49489,12 @@ public final class MasterProtos { public java.lang.String getRegex() { java.lang.Object ref = regex_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - regex_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + regex_ = s; + } return s; } else { return (java.lang.String) ref; @@ -46817,7 +49552,6 @@ public final class MasterProtos { return this; } - // optional bool include_sys_tables = 2 [default = false]; private boolean includeSysTables_ ; /** * optional bool include_sys_tables = 2 [default = false]; @@ -46850,7 +49584,6 @@ public final class MasterProtos { return this; } - // optional string namespace = 3; private java.lang.Object namespace_ = ""; /** * optional string namespace = 3; @@ -46864,9 +49597,12 @@ public final class MasterProtos { public java.lang.String getNamespace() { java.lang.Object ref = namespace_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - namespace_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + namespace_ = s; + } return s; } else { return (java.lang.String) ref; @@ -46923,22 +49659,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder 
setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.GetTableNamesRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.GetTableNamesRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest DEFAULT_INSTANCE; static { - defaultInstance = new GetTableNamesRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GetTableNamesRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetTableNamesRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.GetTableNamesRequest) } - public interface GetTableNamesResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface GetTableNamesResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GetTableNamesResponse) + 
com.google.protobuf.MessageOrBuilder { - // repeated .hbase.pb.TableName table_names = 1; /** * repeated .hbase.pb.TableName table_names = 1; */ @@ -46966,36 +49739,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.GetTableNamesResponse} */ - public static final class GetTableNamesResponse extends - com.google.protobuf.GeneratedMessage - implements GetTableNamesResponseOrBuilder { + public static final class GetTableNamesResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetTableNamesResponse) + GetTableNamesResponseOrBuilder { // Use GetTableNamesResponse.newBuilder() to construct. - private GetTableNamesResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetTableNamesResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private GetTableNamesResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final GetTableNamesResponse defaultInstance; - public static GetTableNamesResponse getDefaultInstance() { - return defaultInstance; } - - public GetTableNamesResponse getDefaultInstanceForType() { - return defaultInstance; + private GetTableNamesResponse() { + tableNames_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private GetTableNamesResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -47019,7 +49784,8 @@ 
public final class MasterProtos { tableNames_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } - tableNames_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry)); + tableNames_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry)); break; } } @@ -47028,7 +49794,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { tableNames_ = java.util.Collections.unmodifiableList(tableNames_); @@ -47042,29 +49808,13 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableNamesResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableNamesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetTableNamesResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetTableNamesResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() 
{ - return PARSER; - } - - // repeated .hbase.pb.TableName table_names = 1; public static final int TABLE_NAMES_FIELD_NUMBER = 1; private java.util.List tableNames_; /** @@ -47100,13 +49850,11 @@ public final class MasterProtos { return tableNames_.get(index); } - private void initFields() { - tableNames_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; for (int i = 0; i < getTableNamesCount(); i++) { if (!getTableNames(i).isInitialized()) { @@ -47120,16 +49868,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < tableNames_.size(); i++) { output.writeMessage(1, tableNames_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -47137,19 +49883,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, tableNames_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -47162,12 +49902,10 @@ public final class MasterProtos { boolean result = true; result = result && getTableNamesList() .equals(other.getTableNamesList()); - result 
= result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -47179,7 +49917,7 @@ public final class MasterProtos { hash = (37 * hash) + TABLE_NAMES_FIELD_NUMBER; hash = (53 * hash) + getTableNamesList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -47207,46 +49945,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + 
.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -47254,14 +50003,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.GetTableNamesResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetTableNamesResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableNamesResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableNamesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -47274,19 +50024,16 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTableNamesFieldBuilder(); } } - private static Builder create() { - return new 
Builder(); - } - public Builder clear() { super.clear(); if (tableNamesBuilder_ == null) { @@ -47298,10 +50045,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableNamesResponse_descriptor; @@ -47335,6 +50078,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse)other); @@ -47365,21 +50134,21 @@ public final class MasterProtos { tableNames_ = other.tableNames_; bitField0_ = (bitField0_ & ~0x00000001); tableNamesBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getTableNamesFieldBuilder() : null; } else { tableNamesBuilder_.addAllMessages(other.tableNames_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getTableNamesCount(); i++) { if (!getTableNames(i).isInitialized()) { - return false; } } @@ -47395,7 +50164,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -47405,7 +50174,6 @@ public final class MasterProtos { } private int bitField0_; - // repeated .hbase.pb.TableName table_names = 1; private java.util.List tableNames_ = java.util.Collections.emptyList(); private void ensureTableNamesIsMutable() { @@ -47415,7 +50183,7 @@ public final class MasterProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNamesBuilder_; /** @@ -47547,7 +50315,8 @@ public final class MasterProtos { java.lang.Iterable values) { if (tableNamesBuilder_ == null) { ensureTableNamesIsMutable(); - super.addAll(values, tableNames_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, tableNames_); onChanged(); } else { tableNamesBuilder_.addAllMessages(values); @@ -47630,11 +50399,11 @@ public final class MasterProtos { getTableNamesBuilderList() { return getTableNamesFieldBuilder().getBuilderList(); } - private 
com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNamesFieldBuilder() { if (tableNamesBuilder_ == null) { - tableNamesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + tableNamesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( tableNames_, ((bitField0_ & 0x00000001) == 0x00000001), @@ -47644,22 +50413,59 @@ public final class MasterProtos { } return tableNamesBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.GetTableNamesResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.GetTableNamesResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse DEFAULT_INSTANCE; static { - defaultInstance = new GetTableNamesResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new 
com.google.protobuf.AbstractParser() { + public GetTableNamesResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetTableNamesResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.GetTableNamesResponse) } - public interface GetTableStateRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface GetTableStateRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GetTableStateRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.TableName table_name = 1; /** * required .hbase.pb.TableName table_name = 1; */ @@ -47676,36 +50482,27 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.GetTableStateRequest} */ - public static final class GetTableStateRequest extends - com.google.protobuf.GeneratedMessage - implements GetTableStateRequestOrBuilder { + public static final class GetTableStateRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetTableStateRequest) + GetTableStateRequestOrBuilder { // Use GetTableStateRequest.newBuilder() to construct. 
- private GetTableStateRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetTableStateRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private GetTableStateRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final GetTableStateRequest defaultInstance; - public static GetTableStateRequest getDefaultInstance() { - return defaultInstance; + private GetTableStateRequest() { } - public GetTableStateRequest getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private GetTableStateRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -47743,7 +50540,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -47754,30 +50551,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableStateRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableStateRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetTableStateRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetTableStateRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.TableName table_name = 1; public static final int TABLE_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** @@ -47790,22 +50571,20 @@ public final class MasterProtos { * required .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * required .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { - return tableName_; + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } - private void initFields() { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasTableName()) { memoizedIsInitialized = 0; @@ -47821,36 +50600,28 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, tableName_); + output.writeMessage(1, getTableName()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, tableName_); + .computeMessageSize(1, getTableName()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -47866,12 +50637,10 @@ public final class MasterProtos { result = result && getTableName() .equals(other.getTableName()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && 
unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -47883,7 +50652,7 @@ public final class MasterProtos { hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -47911,46 +50680,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -47958,14 +50738,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.GetTableStateRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetTableStateRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableStateRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableStateRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -47978,23 +50759,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTableNameFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } 
- public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; } else { tableNameBuilder_.clear(); } @@ -48002,10 +50780,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableStateRequest_descriptor; @@ -48040,6 +50814,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest)other); @@ -48054,17 +50854,16 @@ public final class MasterProtos { if (other.hasTableName()) { mergeTableName(other.getTableName()); } - this.mergeUnknownFields(other.getUnknownFields()); + 
this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasTableName()) { - return false; } if (!getTableName().isInitialized()) { - return false; } return true; @@ -48079,7 +50878,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -48089,9 +50888,8 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.TableName table_name = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * required .hbase.pb.TableName table_name = 1; @@ -48104,7 +50902,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { - return tableName_; + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } @@ -48145,6 +50943,7 @@ public final class MasterProtos { public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); @@ -48163,7 +50962,7 @@ public final class MasterProtos { */ public Builder clearTableName() { if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); @@ -48186,41 +50985,79 @@ public final class MasterProtos { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { - return tableName_; + return tableName_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * required .hbase.pb.TableName table_name = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { - tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( - tableName_, + getTableName(), getParentForChildren(), isClean()); tableName_ = null; } return tableNameBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.GetTableStateRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.GetTableStateRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest DEFAULT_INSTANCE; static { - defaultInstance = new GetTableStateRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest getDefaultInstance() { + return 
DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GetTableStateRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetTableStateRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.GetTableStateRequest) } - public interface GetTableStateResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface GetTableStateResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GetTableStateResponse) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.TableState table_state = 1; /** * required .hbase.pb.TableState table_state = 1; */ @@ -48237,36 +51074,27 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.GetTableStateResponse} */ - public static final class GetTableStateResponse extends - com.google.protobuf.GeneratedMessage - implements GetTableStateResponseOrBuilder { + public static final class GetTableStateResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetTableStateResponse) + GetTableStateResponseOrBuilder { // Use GetTableStateResponse.newBuilder() to construct. 
- private GetTableStateResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetTableStateResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private GetTableStateResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final GetTableStateResponse defaultInstance; - public static GetTableStateResponse getDefaultInstance() { - return defaultInstance; - } - - public GetTableStateResponse getDefaultInstanceForType() { - return defaultInstance; + private GetTableStateResponse() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private GetTableStateResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -48304,7 +51132,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -48315,30 +51143,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableStateResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableStateResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetTableStateResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetTableStateResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.TableState table_state = 1; public static final int TABLE_STATE_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState tableState_; /** @@ -48351,22 +51163,20 @@ public final class MasterProtos { * required .hbase.pb.TableState table_state = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState getTableState() { - return tableState_; + return tableState_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.getDefaultInstance() : tableState_; } /** * required .hbase.pb.TableState table_state = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableStateOrBuilder getTableStateOrBuilder() { - return tableState_; + return tableState_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.getDefaultInstance() : tableState_; } - private void initFields() { - tableState_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasTableState()) { memoizedIsInitialized = 0; @@ -48382,36 +51192,28 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, tableState_); + output.writeMessage(1, getTableState()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, tableState_); + .computeMessageSize(1, getTableState()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -48427,12 +51229,10 @@ public final class MasterProtos { result = result && getTableState() .equals(other.getTableState()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && 
unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -48444,7 +51244,7 @@ public final class MasterProtos { hash = (37 * hash) + TABLE_STATE_FIELD_NUMBER; hash = (53 * hash) + getTableState().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -48472,46 +51272,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -48519,14 +51330,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.GetTableStateResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetTableStateResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableStateResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableStateResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -48539,23 +51351,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTableStateFieldBuilder(); } } - private static Builder create() { - return new 
Builder(); - } - public Builder clear() { super.clear(); if (tableStateBuilder_ == null) { - tableState_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.getDefaultInstance(); + tableState_ = null; } else { tableStateBuilder_.clear(); } @@ -48563,10 +51372,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetTableStateResponse_descriptor; @@ -48601,6 +51406,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse)other); @@ -48615,17 +51446,16 @@ public final class MasterProtos { if (other.hasTableState()) { mergeTableState(other.getTableState()); } - 
this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasTableState()) { - return false; } if (!getTableState().isInitialized()) { - return false; } return true; @@ -48640,7 +51470,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -48650,9 +51480,8 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.TableState table_state = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState tableState_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState tableState_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableStateOrBuilder> tableStateBuilder_; /** * required .hbase.pb.TableState table_state = 1; @@ -48665,7 +51494,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState getTableState() { if (tableStateBuilder_ == null) { - return tableState_; + return tableState_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.getDefaultInstance() : tableState_; } else { return tableStateBuilder_.getMessage(); } @@ -48706,6 +51535,7 @@ public final class MasterProtos { public Builder mergeTableState(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState value) { if (tableStateBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + tableState_ != null && tableState_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.getDefaultInstance()) { tableState_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.newBuilder(tableState_).mergeFrom(value).buildPartial(); @@ -48724,7 +51554,7 @@ public final class MasterProtos { */ public Builder clearTableState() { if (tableStateBuilder_ == null) { - tableState_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.getDefaultInstance(); + tableState_ = null; onChanged(); } else { tableStateBuilder_.clear(); @@ -48747,73 +51577,103 @@ public final class MasterProtos { if (tableStateBuilder_ != null) { return tableStateBuilder_.getMessageOrBuilder(); } else { - return tableState_; + return tableState_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.getDefaultInstance() : tableState_; } } /** * required .hbase.pb.TableState table_state = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableStateOrBuilder> getTableStateFieldBuilder() { if (tableStateBuilder_ == null) { - tableStateBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableStateBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableStateOrBuilder>( - tableState_, + getTableState(), getParentForChildren(), isClean()); tableState_ = null; } return tableStateBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.GetTableStateResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.GetTableStateResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse DEFAULT_INSTANCE; static { - defaultInstance = new GetTableStateResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse getDefaultInstance() 
{ + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GetTableStateResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetTableStateResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.GetTableStateResponse) } - public interface GetClusterStatusRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface GetClusterStatusRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GetClusterStatusRequest) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.GetClusterStatusRequest} */ - public static final class GetClusterStatusRequest extends - com.google.protobuf.GeneratedMessage - implements GetClusterStatusRequestOrBuilder { + public static final class GetClusterStatusRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetClusterStatusRequest) + GetClusterStatusRequestOrBuilder { // Use GetClusterStatusRequest.newBuilder() to construct. 
- private GetClusterStatusRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetClusterStatusRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private GetClusterStatusRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final GetClusterStatusRequest defaultInstance; - public static GetClusterStatusRequest getDefaultInstance() { - return defaultInstance; - } - - public GetClusterStatusRequest getDefaultInstanceForType() { - return defaultInstance; + private GetClusterStatusRequest() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private GetClusterStatusRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -48837,7 +51697,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -48848,34 +51708,18 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetClusterStatusRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetClusterStatusRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetClusterStatusRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetClusterStatusRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -48883,29 +51727,21 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override 
public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -48916,12 +51752,10 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -48929,7 +51763,7 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -48957,46 +51791,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -49004,14 +51849,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.GetClusterStatusRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetClusterStatusRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetClusterStatusRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetClusterStatusRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -49024,27 +51870,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public 
Builder clear() { super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetClusterStatusRequest_descriptor; @@ -49068,6 +51907,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest)other); @@ -49079,7 +51944,8 @@ public final class MasterProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ 
-49096,7 +51962,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -49104,22 +51970,59 @@ public final class MasterProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.GetClusterStatusRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.GetClusterStatusRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest DEFAULT_INSTANCE; static { - defaultInstance = new GetClusterStatusRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GetClusterStatusRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetClusterStatusRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override 
+ public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.GetClusterStatusRequest) } - public interface GetClusterStatusResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface GetClusterStatusResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GetClusterStatusResponse) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.ClusterStatus cluster_status = 1; /** * required .hbase.pb.ClusterStatus cluster_status = 1; */ @@ -49136,36 +52039,27 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.GetClusterStatusResponse} */ - public static final class GetClusterStatusResponse extends - com.google.protobuf.GeneratedMessage - implements GetClusterStatusResponseOrBuilder { + public static final class GetClusterStatusResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetClusterStatusResponse) + GetClusterStatusResponseOrBuilder { // Use GetClusterStatusResponse.newBuilder() to construct. 
- private GetClusterStatusResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetClusterStatusResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private GetClusterStatusResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final GetClusterStatusResponse defaultInstance; - public static GetClusterStatusResponse getDefaultInstance() { - return defaultInstance; } - - public GetClusterStatusResponse getDefaultInstanceForType() { - return defaultInstance; + private GetClusterStatusResponse() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private GetClusterStatusResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -49203,7 +52097,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -49214,30 +52108,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetClusterStatusResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetClusterStatusResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetClusterStatusResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetClusterStatusResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.ClusterStatus cluster_status = 1; public static final int CLUSTER_STATUS_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus clusterStatus_; /** @@ -49250,22 +52128,20 @@ public final class MasterProtos { * required .hbase.pb.ClusterStatus cluster_status = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus getClusterStatus() { - return clusterStatus_; + return clusterStatus_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance() : clusterStatus_; } /** * required .hbase.pb.ClusterStatus cluster_status = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder getClusterStatusOrBuilder() { - return clusterStatus_; + return clusterStatus_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance() : clusterStatus_; } - private void initFields() { - clusterStatus_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasClusterStatus()) { memoizedIsInitialized = 0; @@ -49281,36 +52157,28 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, clusterStatus_); + output.writeMessage(1, getClusterStatus()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, clusterStatus_); + .computeMessageSize(1, getClusterStatus()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -49326,12 +52194,10 @@ public final class MasterProtos { result = result && getClusterStatus() .equals(other.getClusterStatus()); } - result = result && - 
getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -49343,7 +52209,7 @@ public final class MasterProtos { hash = (37 * hash) + CLUSTER_STATUS_FIELD_NUMBER; hash = (53 * hash) + getClusterStatus().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -49371,46 +52237,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + 
.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -49418,14 +52295,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.GetClusterStatusResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetClusterStatusResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetClusterStatusResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetClusterStatusResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -49438,23 +52316,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getClusterStatusFieldBuilder(); } } - private static Builder create() { 
- return new Builder(); - } - public Builder clear() { super.clear(); if (clusterStatusBuilder_ == null) { - clusterStatus_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance(); + clusterStatus_ = null; } else { clusterStatusBuilder_.clear(); } @@ -49462,10 +52337,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetClusterStatusResponse_descriptor; @@ -49500,6 +52371,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse)other); @@ -49514,17 +52411,16 @@ public final class MasterProtos { if (other.hasClusterStatus()) { 
mergeClusterStatus(other.getClusterStatus()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasClusterStatus()) { - return false; } if (!getClusterStatus().isInitialized()) { - return false; } return true; @@ -49539,7 +52435,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -49549,9 +52445,8 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.ClusterStatus cluster_status = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus clusterStatus_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus clusterStatus_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder> clusterStatusBuilder_; /** * required .hbase.pb.ClusterStatus cluster_status = 1; @@ -49564,7 +52459,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus getClusterStatus() { if (clusterStatusBuilder_ == null) { - return clusterStatus_; + return clusterStatus_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance() : clusterStatus_; } else { return clusterStatusBuilder_.getMessage(); } @@ -49605,6 +52500,7 @@ public final class MasterProtos { public Builder mergeClusterStatus(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus value) { if (clusterStatusBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + clusterStatus_ != null && clusterStatus_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance()) { clusterStatus_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus.newBuilder(clusterStatus_).mergeFrom(value).buildPartial(); @@ -49623,7 +52519,7 @@ public final class MasterProtos { */ public Builder clearClusterStatus() { if (clusterStatusBuilder_ == null) { - clusterStatus_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance(); + clusterStatus_ = null; onChanged(); } else { clusterStatusBuilder_.clear(); @@ -49646,73 +52542,103 @@ public final class MasterProtos { if (clusterStatusBuilder_ != null) { return clusterStatusBuilder_.getMessageOrBuilder(); } else { - return clusterStatus_; + return clusterStatus_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance() : clusterStatus_; } } /** * required .hbase.pb.ClusterStatus cluster_status = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder> getClusterStatusFieldBuilder() { if (clusterStatusBuilder_ == null) { - clusterStatusBuilder_ = new com.google.protobuf.SingleFieldBuilder< + clusterStatusBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder>( - clusterStatus_, + getClusterStatus(), getParentForChildren(), isClean()); clusterStatus_ = null; } return clusterStatusBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.GetClusterStatusResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.GetClusterStatusResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse DEFAULT_INSTANCE; static { - defaultInstance = new GetClusterStatusResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse(); + 
} + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GetClusterStatusResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetClusterStatusResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.GetClusterStatusResponse) } - public interface IsMasterRunningRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface IsMasterRunningRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.IsMasterRunningRequest) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.IsMasterRunningRequest} */ - public static final class IsMasterRunningRequest extends - com.google.protobuf.GeneratedMessage - implements IsMasterRunningRequestOrBuilder { + public static final class IsMasterRunningRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.IsMasterRunningRequest) + IsMasterRunningRequestOrBuilder { // Use IsMasterRunningRequest.newBuilder() to construct. 
- private IsMasterRunningRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private IsMasterRunningRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private IsMasterRunningRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final IsMasterRunningRequest defaultInstance; - public static IsMasterRunningRequest getDefaultInstance() { - return defaultInstance; + private IsMasterRunningRequest() { } - public IsMasterRunningRequest getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private IsMasterRunningRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -49736,7 +52662,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -49747,34 +52673,18 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsMasterRunningRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsMasterRunningRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public IsMasterRunningRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new IsMasterRunningRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -49782,29 +52692,21 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override 
public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -49815,12 +52717,10 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -49828,7 +52728,7 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -49856,46 +52756,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -49903,14 +52814,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.IsMasterRunningRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.IsMasterRunningRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsMasterRunningRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsMasterRunningRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -49923,27 +52835,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder 
clear() { super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsMasterRunningRequest_descriptor; @@ -49967,6 +52872,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest)other); @@ -49978,7 +52909,8 @@ public final class MasterProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -49995,7 +52927,7 
@@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -50003,22 +52935,59 @@ public final class MasterProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.IsMasterRunningRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.IsMasterRunningRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest DEFAULT_INSTANCE; static { - defaultInstance = new IsMasterRunningRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public IsMasterRunningRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new IsMasterRunningRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public 
com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.IsMasterRunningRequest) } - public interface IsMasterRunningResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface IsMasterRunningResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.IsMasterRunningResponse) + com.google.protobuf.MessageOrBuilder { - // required bool is_master_running = 1; /** * required bool is_master_running = 1; */ @@ -50031,36 +53000,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.IsMasterRunningResponse} */ - public static final class IsMasterRunningResponse extends - com.google.protobuf.GeneratedMessage - implements IsMasterRunningResponseOrBuilder { + public static final class IsMasterRunningResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.IsMasterRunningResponse) + IsMasterRunningResponseOrBuilder { // Use IsMasterRunningResponse.newBuilder() to construct. 
- private IsMasterRunningResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private IsMasterRunningResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private IsMasterRunningResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final IsMasterRunningResponse defaultInstance; - public static IsMasterRunningResponse getDefaultInstance() { - return defaultInstance; } - - public IsMasterRunningResponse getDefaultInstanceForType() { - return defaultInstance; + private IsMasterRunningResponse() { + isMasterRunning_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private IsMasterRunningResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -50090,7 +53051,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -50101,30 +53062,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsMasterRunningResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsMasterRunningResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public IsMasterRunningResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new IsMasterRunningResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bool is_master_running = 1; public static final int IS_MASTER_RUNNING_FIELD_NUMBER = 1; private boolean isMasterRunning_; /** @@ -50140,13 +53085,11 @@ public final class MasterProtos { return isMasterRunning_; } - private void initFields() { - isMasterRunning_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasIsMasterRunning()) { memoizedIsInitialized = 0; @@ -50158,16 +53101,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, isMasterRunning_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; 
size = 0; @@ -50175,19 +53116,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, isMasterRunning_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -50203,12 +53138,10 @@ public final class MasterProtos { result = result && (getIsMasterRunning() == other.getIsMasterRunning()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -50218,9 +53151,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasIsMasterRunning()) { hash = (37 * hash) + IS_MASTER_RUNNING_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getIsMasterRunning()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getIsMasterRunning()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -50248,46 +53182,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom( java.io.InputStream 
input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder 
newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -50295,14 +53240,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.IsMasterRunningResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.IsMasterRunningResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsMasterRunningResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsMasterRunningResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -50315,18 +53261,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); 
maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); isMasterRunning_ = false; @@ -50334,10 +53277,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsMasterRunningResponse_descriptor; @@ -50368,6 +53307,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse)other); @@ -50382,13 +53347,13 @@ public final class MasterProtos { if 
(other.hasIsMasterRunning()) { setIsMasterRunning(other.getIsMasterRunning()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasIsMasterRunning()) { - return false; } return true; @@ -50403,7 +53368,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -50413,7 +53378,6 @@ public final class MasterProtos { } private int bitField0_; - // required bool is_master_running = 1; private boolean isMasterRunning_ ; /** * required bool is_master_running = 1; @@ -50445,22 +53409,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.IsMasterRunningResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.IsMasterRunningResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse DEFAULT_INSTANCE; static { - defaultInstance = new IsMasterRunningResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse getDefaultInstance() { + return 
DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public IsMasterRunningResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new IsMasterRunningResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.IsMasterRunningResponse) } - public interface ExecProcedureRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ExecProcedureRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ExecProcedureRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.ProcedureDescription procedure = 1; /** * required .hbase.pb.ProcedureDescription procedure = 1; */ @@ -50477,36 +53478,27 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.ExecProcedureRequest} */ - public static final class ExecProcedureRequest extends - com.google.protobuf.GeneratedMessage - implements ExecProcedureRequestOrBuilder { + public static final class ExecProcedureRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ExecProcedureRequest) + ExecProcedureRequestOrBuilder { // Use ExecProcedureRequest.newBuilder() to construct. 
- private ExecProcedureRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private ExecProcedureRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private ExecProcedureRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ExecProcedureRequest defaultInstance; - public static ExecProcedureRequest getDefaultInstance() { - return defaultInstance; + private ExecProcedureRequest() { } - public ExecProcedureRequest getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ExecProcedureRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -50544,7 +53536,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -50555,30 +53547,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ExecProcedureRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ExecProcedureRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ExecProcedureRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ExecProcedureRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.ProcedureDescription procedure = 1; public static final int PROCEDURE_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription procedure_; /** @@ -50591,22 +53567,20 @@ public final class MasterProtos { * required .hbase.pb.ProcedureDescription procedure = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription getProcedure() { - return procedure_; + return procedure_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance() : procedure_; } /** * required .hbase.pb.ProcedureDescription procedure = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder getProcedureOrBuilder() { - return procedure_; + return procedure_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance() : procedure_; } - private void initFields() { - procedure_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasProcedure()) { memoizedIsInitialized = 0; @@ -50622,36 +53596,28 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, procedure_); + output.writeMessage(1, getProcedure()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, procedure_); + .computeMessageSize(1, getProcedure()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -50667,12 +53633,10 @@ public final class MasterProtos { result = result && getProcedure() .equals(other.getProcedure()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = 
result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -50684,7 +53648,7 @@ public final class MasterProtos { hash = (37 * hash) + PROCEDURE_FIELD_NUMBER; hash = (53 * hash) + getProcedure().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -50712,46 +53676,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -50759,14 +53734,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.ExecProcedureRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ExecProcedureRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ExecProcedureRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ExecProcedureRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -50779,23 +53755,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getProcedureFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } 
- public Builder clear() { super.clear(); if (procedureBuilder_ == null) { - procedure_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance(); + procedure_ = null; } else { procedureBuilder_.clear(); } @@ -50803,10 +53776,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ExecProcedureRequest_descriptor; @@ -50841,6 +53810,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest)other); @@ -50855,17 +53850,16 @@ public final class MasterProtos { if (other.hasProcedure()) { mergeProcedure(other.getProcedure()); } - this.mergeUnknownFields(other.getUnknownFields()); + 
this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasProcedure()) { - return false; } if (!getProcedure().isInitialized()) { - return false; } return true; @@ -50880,7 +53874,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -50890,9 +53884,8 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.ProcedureDescription procedure = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription procedure_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription procedure_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder> procedureBuilder_; /** * required .hbase.pb.ProcedureDescription procedure = 1; @@ -50905,7 +53898,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription getProcedure() { if (procedureBuilder_ == null) { - return procedure_; + return procedure_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance() : procedure_; } else { return procedureBuilder_.getMessage(); } @@ -50946,6 +53939,7 @@ public final class MasterProtos { public Builder mergeProcedure(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription value) { if (procedureBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + procedure_ != null && procedure_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance()) { procedure_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.newBuilder(procedure_).mergeFrom(value).buildPartial(); @@ -50964,7 +53958,7 @@ public final class MasterProtos { */ public Builder clearProcedure() { if (procedureBuilder_ == null) { - procedure_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance(); + procedure_ = null; onChanged(); } else { procedureBuilder_.clear(); @@ -50987,41 +53981,79 @@ public final class MasterProtos { if (procedureBuilder_ != null) { return procedureBuilder_.getMessageOrBuilder(); } else { - return procedure_; + return procedure_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance() : procedure_; } } /** * required .hbase.pb.ProcedureDescription procedure = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder> getProcedureFieldBuilder() { if (procedureBuilder_ == null) { - procedureBuilder_ = new com.google.protobuf.SingleFieldBuilder< + procedureBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder>( - procedure_, + getProcedure(), getParentForChildren(), isClean()); procedure_ = null; } return procedureBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ExecProcedureRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.ExecProcedureRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest DEFAULT_INSTANCE; static { - defaultInstance = new ExecProcedureRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest(); + } + + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ExecProcedureRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ExecProcedureRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ExecProcedureRequest) } - public interface ExecProcedureResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ExecProcedureResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ExecProcedureResponse) + com.google.protobuf.MessageOrBuilder { - // optional int64 expected_timeout = 1; /** * optional int64 expected_timeout = 1; */ @@ -51031,7 +54063,6 @@ public final class MasterProtos { */ long getExpectedTimeout(); - // optional bytes return_data = 2; /** * optional bytes return_data = 2; */ @@ -51044,36 +54075,29 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.ExecProcedureResponse} */ - public static final class ExecProcedureResponse extends - com.google.protobuf.GeneratedMessage - implements ExecProcedureResponseOrBuilder { + public static final class ExecProcedureResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ExecProcedureResponse) + ExecProcedureResponseOrBuilder { // Use 
ExecProcedureResponse.newBuilder() to construct. - private ExecProcedureResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private ExecProcedureResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private ExecProcedureResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ExecProcedureResponse defaultInstance; - public static ExecProcedureResponse getDefaultInstance() { - return defaultInstance; - } - - public ExecProcedureResponse getDefaultInstanceForType() { - return defaultInstance; + private ExecProcedureResponse() { + expectedTimeout_ = 0L; + returnData_ = com.google.protobuf.ByteString.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ExecProcedureResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -51108,7 +54132,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -51119,30 +54143,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ExecProcedureResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ExecProcedureResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ExecProcedureResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ExecProcedureResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional int64 expected_timeout = 1; public static final int EXPECTED_TIMEOUT_FIELD_NUMBER = 1; private long expectedTimeout_; /** @@ -51158,7 +54166,6 @@ public final class MasterProtos { return expectedTimeout_; } - // optional bytes return_data = 2; public static final int RETURN_DATA_FIELD_NUMBER = 2; private com.google.protobuf.ByteString returnData_; /** @@ -51174,14 +54181,11 @@ public final class MasterProtos { return returnData_; } - private void initFields() { - expectedTimeout_ = 0L; - returnData_ = com.google.protobuf.ByteString.EMPTY; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -51189,19 +54193,17 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - 
getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeInt64(1, expectedTimeout_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, returnData_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -51213,19 +54215,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, returnData_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -51246,12 +54242,10 @@ public final class MasterProtos { result = result && getReturnData() .equals(other.getReturnData()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -51261,13 +54255,14 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasExpectedTimeout()) { hash = (37 * hash) + EXPECTED_TIMEOUT_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getExpectedTimeout()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getExpectedTimeout()); } if (hasReturnData()) { hash = (37 * hash) + RETURN_DATA_FIELD_NUMBER; hash = (53 * hash) + getReturnData().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + 
unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -51295,46 +54290,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -51342,14 +54348,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.ExecProcedureResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ExecProcedureResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ExecProcedureResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ExecProcedureResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -51362,18 +54369,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); expectedTimeout_ = 0L; @@ -51383,10 +54387,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ExecProcedureResponse_descriptor; @@ -51421,6 +54421,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + 
com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse)other); @@ -51438,7 +54464,8 @@ public final class MasterProtos { if (other.hasReturnData()) { setReturnData(other.getReturnData()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -51455,7 +54482,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -51465,7 +54492,6 @@ public final class MasterProtos { } private int bitField0_; - // optional int64 expected_timeout = 1; private long expectedTimeout_ ; /** * optional int64 expected_timeout = 1; @@ -51498,7 +54524,6 @@ public final class MasterProtos { return this; } - // optional bytes return_data = 2; private com.google.protobuf.ByteString returnData_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes return_data = 2; @@ -51533,22 +54558,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // 
@@protoc_insertion_point(builder_scope:hbase.pb.ExecProcedureResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.ExecProcedureResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse DEFAULT_INSTANCE; static { - defaultInstance = new ExecProcedureResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ExecProcedureResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ExecProcedureResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ExecProcedureResponse) } - public interface IsProcedureDoneRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface IsProcedureDoneRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.IsProcedureDoneRequest) + com.google.protobuf.MessageOrBuilder { - // optional .hbase.pb.ProcedureDescription procedure = 1; /** * optional .hbase.pb.ProcedureDescription procedure = 1; */ @@ -51565,36 +54627,27 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.IsProcedureDoneRequest} */ - 
public static final class IsProcedureDoneRequest extends - com.google.protobuf.GeneratedMessage - implements IsProcedureDoneRequestOrBuilder { + public static final class IsProcedureDoneRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.IsProcedureDoneRequest) + IsProcedureDoneRequestOrBuilder { // Use IsProcedureDoneRequest.newBuilder() to construct. - private IsProcedureDoneRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private IsProcedureDoneRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private IsProcedureDoneRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final IsProcedureDoneRequest defaultInstance; - public static IsProcedureDoneRequest getDefaultInstance() { - return defaultInstance; - } - - public IsProcedureDoneRequest getDefaultInstanceForType() { - return defaultInstance; + private IsProcedureDoneRequest() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private IsProcedureDoneRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -51632,7 +54685,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); 
makeExtensionsImmutable(); @@ -51643,30 +54696,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsProcedureDoneRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsProcedureDoneRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public IsProcedureDoneRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new IsProcedureDoneRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional .hbase.pb.ProcedureDescription procedure = 1; public static final int PROCEDURE_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription procedure_; /** @@ -51679,22 +54716,20 @@ public final class MasterProtos { * optional .hbase.pb.ProcedureDescription procedure = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription getProcedure() { - return procedure_; + return procedure_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance() : procedure_; } /** * optional .hbase.pb.ProcedureDescription procedure = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder getProcedureOrBuilder() { - return procedure_; + return procedure_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance() : procedure_; } - private void initFields() { - procedure_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (hasProcedure()) { if (!getProcedure().isInitialized()) { @@ -51708,36 +54743,28 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, procedure_); + output.writeMessage(1, getProcedure()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, procedure_); + .computeMessageSize(1, getProcedure()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws 
java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -51753,12 +54780,10 @@ public final class MasterProtos { result = result && getProcedure() .equals(other.getProcedure()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -51770,7 +54795,7 @@ public final class MasterProtos { hash = (37 * hash) + PROCEDURE_FIELD_NUMBER; hash = (53 * hash) + getProcedure().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -51798,46 +54823,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -51845,14 +54881,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.IsProcedureDoneRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.IsProcedureDoneRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsProcedureDoneRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsProcedureDoneRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -51865,23 +54902,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getProcedureFieldBuilder(); } } - private static Builder create() { - return new 
Builder(); - } - public Builder clear() { super.clear(); if (procedureBuilder_ == null) { - procedure_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance(); + procedure_ = null; } else { procedureBuilder_.clear(); } @@ -51889,10 +54923,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsProcedureDoneRequest_descriptor; @@ -51927,6 +54957,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest)other); @@ -51941,14 +54997,14 @@ public final class MasterProtos { if (other.hasProcedure()) { mergeProcedure(other.getProcedure()); } - 
this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (hasProcedure()) { if (!getProcedure().isInitialized()) { - return false; } } @@ -51964,7 +55020,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -51974,9 +55030,8 @@ public final class MasterProtos { } private int bitField0_; - // optional .hbase.pb.ProcedureDescription procedure = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription procedure_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription procedure_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder> procedureBuilder_; /** * optional .hbase.pb.ProcedureDescription procedure = 1; @@ -51989,7 +55044,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription getProcedure() { if (procedureBuilder_ == null) { - return procedure_; + return procedure_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance() : procedure_; } else { return procedureBuilder_.getMessage(); } @@ -52030,6 +55085,7 @@ public final class MasterProtos { public Builder mergeProcedure(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription value) { if (procedureBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + procedure_ != null && procedure_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance()) { procedure_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.newBuilder(procedure_).mergeFrom(value).buildPartial(); @@ -52048,7 +55104,7 @@ public final class MasterProtos { */ public Builder clearProcedure() { if (procedureBuilder_ == null) { - procedure_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance(); + procedure_ = null; onChanged(); } else { procedureBuilder_.clear(); @@ -52071,41 +55127,79 @@ public final class MasterProtos { if (procedureBuilder_ != null) { return procedureBuilder_.getMessageOrBuilder(); } else { - return procedure_; + return procedure_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance() : procedure_; } } /** * optional .hbase.pb.ProcedureDescription procedure = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder> getProcedureFieldBuilder() { if (procedureBuilder_ == null) { - procedureBuilder_ = new com.google.protobuf.SingleFieldBuilder< + procedureBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder>( - procedure_, + getProcedure(), getParentForChildren(), isClean()); procedure_ = null; } return procedureBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.IsProcedureDoneRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.IsProcedureDoneRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest DEFAULT_INSTANCE; static { - defaultInstance = new IsProcedureDoneRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest(); + } + + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public IsProcedureDoneRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new IsProcedureDoneRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.IsProcedureDoneRequest) } - public interface IsProcedureDoneResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface IsProcedureDoneResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.IsProcedureDoneResponse) + com.google.protobuf.MessageOrBuilder { - // optional bool done = 1 [default = false]; /** * optional bool done = 1 [default = false]; */ @@ -52115,7 +55209,6 @@ public final class MasterProtos { */ boolean getDone(); - // optional .hbase.pb.ProcedureDescription snapshot = 2; /** * optional .hbase.pb.ProcedureDescription snapshot = 2; */ @@ -52132,36 +55225,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.IsProcedureDoneResponse} */ - public static final class IsProcedureDoneResponse extends - com.google.protobuf.GeneratedMessage - implements IsProcedureDoneResponseOrBuilder { + public static final class IsProcedureDoneResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // 
@@protoc_insertion_point(message_implements:hbase.pb.IsProcedureDoneResponse) + IsProcedureDoneResponseOrBuilder { // Use IsProcedureDoneResponse.newBuilder() to construct. - private IsProcedureDoneResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private IsProcedureDoneResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private IsProcedureDoneResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final IsProcedureDoneResponse defaultInstance; - public static IsProcedureDoneResponse getDefaultInstance() { - return defaultInstance; - } - - public IsProcedureDoneResponse getDefaultInstanceForType() { - return defaultInstance; + private IsProcedureDoneResponse() { + done_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private IsProcedureDoneResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -52204,7 +55289,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -52215,30 +55300,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsProcedureDoneResponse_descriptor; } - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsProcedureDoneResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public IsProcedureDoneResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new IsProcedureDoneResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional bool done = 1 [default = false]; public static final int DONE_FIELD_NUMBER = 1; private boolean done_; /** @@ -52254,7 +55323,6 @@ public final class MasterProtos { return done_; } - // optional .hbase.pb.ProcedureDescription snapshot = 2; public static final int SNAPSHOT_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription snapshot_; /** @@ -52267,23 +55335,20 @@ public final class MasterProtos { * optional .hbase.pb.ProcedureDescription snapshot = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription getSnapshot() { - return snapshot_; + return snapshot_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance() : snapshot_; } /** * optional .hbase.pb.ProcedureDescription snapshot = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder getSnapshotOrBuilder() { - return snapshot_; + return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance() : snapshot_; } - private void initFields() { - done_ = false; - snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (hasSnapshot()) { if (!getSnapshot().isInitialized()) { @@ -52297,19 +55362,17 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, done_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, snapshot_); + output.writeMessage(2, getSnapshot()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -52319,21 +55382,15 @@ public final class MasterProtos { } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, snapshot_); + .computeMessageSize(2, getSnapshot()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; 
} private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -52354,12 +55411,10 @@ public final class MasterProtos { result = result && getSnapshot() .equals(other.getSnapshot()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -52369,13 +55424,14 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasDone()) { hash = (37 * hash) + DONE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getDone()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getDone()); } if (hasSnapshot()) { hash = (37 * hash) + SNAPSHOT_FIELD_NUMBER; hash = (53 * hash) + getSnapshot().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -52403,46 +55459,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public 
static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -52450,14 +55517,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.IsProcedureDoneResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.IsProcedureDoneResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsProcedureDoneResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsProcedureDoneResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -52470,25 +55538,22 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getSnapshotFieldBuilder(); } } - private static Builder create() { - return 
new Builder(); - } - public Builder clear() { super.clear(); done_ = false; bitField0_ = (bitField0_ & ~0x00000001); if (snapshotBuilder_ == null) { - snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance(); + snapshot_ = null; } else { snapshotBuilder_.clear(); } @@ -52496,10 +55561,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsProcedureDoneResponse_descriptor; @@ -52538,6 +55599,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse)other); @@ -52555,14 +55642,14 @@ public final class MasterProtos { if (other.hasSnapshot()) { 
mergeSnapshot(other.getSnapshot()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (hasSnapshot()) { if (!getSnapshot().isInitialized()) { - return false; } } @@ -52578,7 +55665,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -52588,7 +55675,6 @@ public final class MasterProtos { } private int bitField0_; - // optional bool done = 1 [default = false]; private boolean done_ ; /** * optional bool done = 1 [default = false]; @@ -52621,9 +55707,8 @@ public final class MasterProtos { return this; } - // optional .hbase.pb.ProcedureDescription snapshot = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription snapshot_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder> snapshotBuilder_; /** * optional .hbase.pb.ProcedureDescription snapshot = 2; @@ -52636,7 +55721,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription getSnapshot() { if 
(snapshotBuilder_ == null) { - return snapshot_; + return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance() : snapshot_; } else { return snapshotBuilder_.getMessage(); } @@ -52677,6 +55762,7 @@ public final class MasterProtos { public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription value) { if (snapshotBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + snapshot_ != null && snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance()) { snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); @@ -52695,7 +55781,7 @@ public final class MasterProtos { */ public Builder clearSnapshot() { if (snapshotBuilder_ == null) { - snapshot_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance(); + snapshot_ = null; onChanged(); } else { snapshotBuilder_.clear(); @@ -52718,41 +55804,79 @@ public final class MasterProtos { if (snapshotBuilder_ != null) { return snapshotBuilder_.getMessageOrBuilder(); } else { - return snapshot_; + return snapshot_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance() : snapshot_; } } /** * optional .hbase.pb.ProcedureDescription snapshot = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder> getSnapshotFieldBuilder() { if (snapshotBuilder_ == null) { - snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilder< + snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder>( - snapshot_, + getSnapshot(), getParentForChildren(), isClean()); snapshot_ = null; } return snapshotBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.IsProcedureDoneResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.IsProcedureDoneResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse DEFAULT_INSTANCE; static { - defaultInstance = new IsProcedureDoneResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse(); + } + + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public IsProcedureDoneResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new IsProcedureDoneResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.IsProcedureDoneResponse) } - public interface GetProcedureResultRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface GetProcedureResultRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GetProcedureResultRequest) + com.google.protobuf.MessageOrBuilder { - // required uint64 proc_id = 1; /** * required uint64 proc_id = 1; */ @@ -52765,36 +55889,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.GetProcedureResultRequest} */ - public static final class GetProcedureResultRequest extends - com.google.protobuf.GeneratedMessage - implements GetProcedureResultRequestOrBuilder { + public static final class GetProcedureResultRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetProcedureResultRequest) + GetProcedureResultRequestOrBuilder { // Use GetProcedureResultRequest.newBuilder() to construct. 
- private GetProcedureResultRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetProcedureResultRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private GetProcedureResultRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final GetProcedureResultRequest defaultInstance; - public static GetProcedureResultRequest getDefaultInstance() { - return defaultInstance; } - - public GetProcedureResultRequest getDefaultInstanceForType() { - return defaultInstance; + private GetProcedureResultRequest() { + procId_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private GetProcedureResultRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -52824,7 +55940,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -52835,30 +55951,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetProcedureResultRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { 
return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetProcedureResultRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetProcedureResultRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetProcedureResultRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required uint64 proc_id = 1; public static final int PROC_ID_FIELD_NUMBER = 1; private long procId_; /** @@ -52874,13 +55974,11 @@ public final class MasterProtos { return procId_; } - private void initFields() { - procId_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasProcId()) { memoizedIsInitialized = 0; @@ -52892,16 +55990,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, procId_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -52909,19 +56005,13 @@ public final 
class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(1, procId_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -52937,12 +56027,10 @@ public final class MasterProtos { result = result && (getProcId() == other.getProcId()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -52952,9 +56040,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasProcId()) { hash = (37 * hash) + PROC_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getProcId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getProcId()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -52982,46 +56071,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return 
PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest prototype) { - return 
newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -53029,14 +56129,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.GetProcedureResultRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetProcedureResultRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetProcedureResultRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetProcedureResultRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -53049,18 +56150,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if 
(com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); procId_ = 0L; @@ -53068,10 +56166,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetProcedureResultRequest_descriptor; @@ -53102,6 +56196,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest)other); @@ -53116,13 +56236,13 @@ public final class MasterProtos { if (other.hasProcId()) { setProcId(other.getProcId()); } - 
this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasProcId()) { - return false; } return true; @@ -53137,7 +56257,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -53147,7 +56267,6 @@ public final class MasterProtos { } private int bitField0_; - // required uint64 proc_id = 1; private long procId_ ; /** * required uint64 proc_id = 1; @@ -53179,22 +56298,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.GetProcedureResultRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.GetProcedureResultRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest DEFAULT_INSTANCE; static { - defaultInstance = new GetProcedureResultRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new 
com.google.protobuf.AbstractParser() { + public GetProcedureResultRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetProcedureResultRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.GetProcedureResultRequest) } - public interface GetProcedureResultResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface GetProcedureResultResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GetProcedureResultResponse) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.GetProcedureResultResponse.State state = 1; /** * required .hbase.pb.GetProcedureResultResponse.State state = 1; */ @@ -53204,7 +56360,6 @@ public final class MasterProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse.State getState(); - // optional uint64 start_time = 2; /** * optional uint64 start_time = 2; */ @@ -53214,7 +56369,6 @@ public final class MasterProtos { */ long getStartTime(); - // optional uint64 last_update = 3; /** * optional uint64 last_update = 3; */ @@ -53224,7 +56378,6 @@ public final class MasterProtos { */ long getLastUpdate(); - // optional bytes result = 4; /** * optional bytes result = 4; */ @@ -53234,7 +56387,6 @@ public final class MasterProtos { */ com.google.protobuf.ByteString getResult(); - // optional .hbase.pb.ForeignExceptionMessage exception = 5; /** * optional .hbase.pb.ForeignExceptionMessage exception = 5; */ @@ -53251,36 
+56403,31 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.GetProcedureResultResponse} */ - public static final class GetProcedureResultResponse extends - com.google.protobuf.GeneratedMessage - implements GetProcedureResultResponseOrBuilder { + public static final class GetProcedureResultResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetProcedureResultResponse) + GetProcedureResultResponseOrBuilder { // Use GetProcedureResultResponse.newBuilder() to construct. - private GetProcedureResultResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetProcedureResultResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private GetProcedureResultResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final GetProcedureResultResponse defaultInstance; - public static GetProcedureResultResponse getDefaultInstance() { - return defaultInstance; - } - - public GetProcedureResultResponse getDefaultInstanceForType() { - return defaultInstance; + private GetProcedureResultResponse() { + state_ = 0; + startTime_ = 0L; + lastUpdate_ = 0L; + result_ = com.google.protobuf.ByteString.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private GetProcedureResultResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -53306,7 +56453,7 @@ public final class MasterProtos 
{ unknownFields.mergeVarintField(1, rawValue); } else { bitField0_ |= 0x00000001; - state_ = value; + state_ = rawValue; } break; } @@ -53344,7 +56491,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -53355,28 +56502,13 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetProcedureResultResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetProcedureResultResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetProcedureResultResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetProcedureResultResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - /** * Protobuf enum {@code hbase.pb.GetProcedureResultResponse.State} */ @@ -53385,15 +56517,15 @@ public final class MasterProtos { /** * NOT_FOUND = 0; */ - NOT_FOUND(0, 0), + NOT_FOUND(0), /** * RUNNING = 1; */ - RUNNING(1, 1), + RUNNING(1), /** * FINISHED = 2; 
*/ - FINISHED(2, 2), + FINISHED(2), ; /** @@ -53410,9 +56542,19 @@ public final class MasterProtos { public static final int FINISHED_VALUE = 2; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static State valueOf(int value) { + return forNumber(value); + } + + public static State forNumber(int value) { switch (value) { case 0: return NOT_FOUND; case 1: return RUNNING; @@ -53425,17 +56567,17 @@ public final class MasterProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + State> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public State findValueByNumber(int number) { - return State.valueOf(number); + return State.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -53457,11 +56599,9 @@ public final class MasterProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private State(int index, int value) { - this.index = index; + private State(int value) { this.value = value; } @@ -53469,9 +56609,8 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.GetProcedureResultResponse.State state = 1; public static final int STATE_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse.State state_; + private int state_; /** * required .hbase.pb.GetProcedureResultResponse.State state = 1; */ @@ -53482,10 +56621,10 @@ public final class MasterProtos { * required 
.hbase.pb.GetProcedureResultResponse.State state = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse.State getState() { - return state_; + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse.State result = org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse.State.valueOf(state_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse.State.NOT_FOUND : result; } - // optional uint64 start_time = 2; public static final int START_TIME_FIELD_NUMBER = 2; private long startTime_; /** @@ -53501,7 +56640,6 @@ public final class MasterProtos { return startTime_; } - // optional uint64 last_update = 3; public static final int LAST_UPDATE_FIELD_NUMBER = 3; private long lastUpdate_; /** @@ -53517,7 +56655,6 @@ public final class MasterProtos { return lastUpdate_; } - // optional bytes result = 4; public static final int RESULT_FIELD_NUMBER = 4; private com.google.protobuf.ByteString result_; /** @@ -53533,7 +56670,6 @@ public final class MasterProtos { return result_; } - // optional .hbase.pb.ForeignExceptionMessage exception = 5; public static final int EXCEPTION_FIELD_NUMBER = 5; private org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage exception_; /** @@ -53546,26 +56682,20 @@ public final class MasterProtos { * optional .hbase.pb.ForeignExceptionMessage exception = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage getException() { - return exception_; + return exception_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance() : exception_; } /** * optional .hbase.pb.ForeignExceptionMessage exception = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder getExceptionOrBuilder() { - return exception_; + return exception_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance() : exception_; } - private void initFields() { - state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse.State.NOT_FOUND; - startTime_ = 0L; - lastUpdate_ = 0L; - result_ = com.google.protobuf.ByteString.EMPTY; - exception_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasState()) { memoizedIsInitialized = 0; @@ -53577,9 +56707,8 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeEnum(1, state_.getNumber()); + output.writeEnum(1, state_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt64(2, startTime_); @@ -53591,20 +56720,19 @@ public final class MasterProtos { output.writeBytes(4, result_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeMessage(5, exception_); + output.writeMessage(5, getException()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = 
memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(1, state_.getNumber()); + .computeEnumSize(1, state_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream @@ -53620,21 +56748,15 @@ public final class MasterProtos { } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(5, exception_); + .computeMessageSize(5, getException()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -53647,8 +56769,7 @@ public final class MasterProtos { boolean result = true; result = result && (hasState() == other.hasState()); if (hasState()) { - result = result && - (getState() == other.getState()); + result = result && state_ == other.state_; } result = result && (hasStartTime() == other.hasStartTime()); if (hasStartTime()) { @@ -53670,12 +56791,10 @@ public final class MasterProtos { result = result && getException() .equals(other.getException()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -53685,15 +56804,17 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasState()) { hash = (37 * hash) + STATE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getState()); + hash = (53 * hash) + 
state_; } if (hasStartTime()) { hash = (37 * hash) + START_TIME_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getStartTime()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getStartTime()); } if (hasLastUpdate()) { hash = (37 * hash) + LAST_UPDATE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLastUpdate()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getLastUpdate()); } if (hasResult()) { hash = (37 * hash) + RESULT_FIELD_NUMBER; @@ -53703,7 +56824,7 @@ public final class MasterProtos { hash = (37 * hash) + EXCEPTION_FIELD_NUMBER; hash = (53 * hash) + getException().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -53731,46 +56852,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse parseDelimitedFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -53778,14 +56910,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.GetProcedureResultResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetProcedureResultResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetProcedureResultResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetProcedureResultResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -53798,22 +56931,19 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getExceptionFieldBuilder(); } } - private static Builder 
create() { - return new Builder(); - } - public Builder clear() { super.clear(); - state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse.State.NOT_FOUND; + state_ = 0; bitField0_ = (bitField0_ & ~0x00000001); startTime_ = 0L; bitField0_ = (bitField0_ & ~0x00000002); @@ -53822,7 +56952,7 @@ public final class MasterProtos { result_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000008); if (exceptionBuilder_ == null) { - exception_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance(); + exception_ = null; } else { exceptionBuilder_.clear(); } @@ -53830,10 +56960,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetProcedureResultResponse_descriptor; @@ -53884,6 +57010,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder 
mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse)other); @@ -53910,13 +57062,13 @@ public final class MasterProtos { if (other.hasException()) { mergeException(other.getException()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasState()) { - return false; } return true; @@ -53931,7 +57083,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -53941,8 +57093,7 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.GetProcedureResultResponse.State state = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse.State state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse.State.NOT_FOUND; + private int state_ = 0; /** * required .hbase.pb.GetProcedureResultResponse.State state = 1; */ @@ -53953,7 +57104,8 @@ public final class MasterProtos { * required .hbase.pb.GetProcedureResultResponse.State state = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse.State getState() { - return state_; + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse.State result = 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse.State.valueOf(state_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse.State.NOT_FOUND : result; } /** * required .hbase.pb.GetProcedureResultResponse.State state = 1; @@ -53963,7 +57115,7 @@ public final class MasterProtos { throw new NullPointerException(); } bitField0_ |= 0x00000001; - state_ = value; + state_ = value.getNumber(); onChanged(); return this; } @@ -53972,12 +57124,11 @@ public final class MasterProtos { */ public Builder clearState() { bitField0_ = (bitField0_ & ~0x00000001); - state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse.State.NOT_FOUND; + state_ = 0; onChanged(); return this; } - // optional uint64 start_time = 2; private long startTime_ ; /** * optional uint64 start_time = 2; @@ -54010,7 +57161,6 @@ public final class MasterProtos { return this; } - // optional uint64 last_update = 3; private long lastUpdate_ ; /** * optional uint64 last_update = 3; @@ -54043,7 +57193,6 @@ public final class MasterProtos { return this; } - // optional bytes result = 4; private com.google.protobuf.ByteString result_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes result = 4; @@ -54079,9 +57228,8 @@ public final class MasterProtos { return this; } - // optional .hbase.pb.ForeignExceptionMessage exception = 5; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage exception_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage exception_ = null; + private com.google.protobuf.SingleFieldBuilderV3< 
org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder> exceptionBuilder_; /** * optional .hbase.pb.ForeignExceptionMessage exception = 5; @@ -54094,7 +57242,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage getException() { if (exceptionBuilder_ == null) { - return exception_; + return exception_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance() : exception_; } else { return exceptionBuilder_.getMessage(); } @@ -54135,6 +57283,7 @@ public final class MasterProtos { public Builder mergeException(org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage value) { if (exceptionBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010) && + exception_ != null && exception_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance()) { exception_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.newBuilder(exception_).mergeFrom(value).buildPartial(); @@ -54153,7 +57302,7 @@ public final class MasterProtos { */ public Builder clearException() { if (exceptionBuilder_ == null) { - exception_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance(); + exception_ = null; onChanged(); } else { exceptionBuilder_.clear(); @@ -54176,41 +57325,79 @@ public final class MasterProtos { if (exceptionBuilder_ != null) { return exceptionBuilder_.getMessageOrBuilder(); } else { - return exception_; + return exception_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance() : exception_; } } /** * optional .hbase.pb.ForeignExceptionMessage exception = 5; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder> getExceptionFieldBuilder() { if (exceptionBuilder_ == null) { - exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder>( - exception_, + getException(), getParentForChildren(), isClean()); exception_ = null; } return exceptionBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.GetProcedureResultResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.GetProcedureResultResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse DEFAULT_INSTANCE; static { - defaultInstance = new GetProcedureResultResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GetProcedureResultResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetProcedureResultResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.GetProcedureResultResponse) } - public interface AbortProcedureRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface AbortProcedureRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.AbortProcedureRequest) + com.google.protobuf.MessageOrBuilder { - // required uint64 proc_id = 1; /** * required uint64 proc_id = 1; */ @@ -54220,7 +57407,6 @@ public final class MasterProtos { */ long getProcId(); - // optional bool mayInterruptIfRunning = 2 [default = true]; /** * optional bool mayInterruptIfRunning = 2 [default = true]; */ @@ -54233,36 +57419,29 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.AbortProcedureRequest} */ - public static final class AbortProcedureRequest extends - com.google.protobuf.GeneratedMessage - implements AbortProcedureRequestOrBuilder { + public static final class AbortProcedureRequest extends + 
com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.AbortProcedureRequest) + AbortProcedureRequestOrBuilder { // Use AbortProcedureRequest.newBuilder() to construct. - private AbortProcedureRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private AbortProcedureRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private AbortProcedureRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final AbortProcedureRequest defaultInstance; - public static AbortProcedureRequest getDefaultInstance() { - return defaultInstance; } - - public AbortProcedureRequest getDefaultInstanceForType() { - return defaultInstance; + private AbortProcedureRequest() { + procId_ = 0L; + mayInterruptIfRunning_ = true; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private AbortProcedureRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -54297,7 +57476,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -54308,30 +57487,14 @@ public final class MasterProtos { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AbortProcedureRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AbortProcedureRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public AbortProcedureRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new AbortProcedureRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required uint64 proc_id = 1; public static final int PROC_ID_FIELD_NUMBER = 1; private long procId_; /** @@ -54347,7 +57510,6 @@ public final class MasterProtos { return procId_; } - // optional bool mayInterruptIfRunning = 2 [default = true]; public static final int MAYINTERRUPTIFRUNNING_FIELD_NUMBER = 2; private boolean mayInterruptIfRunning_; /** @@ -54363,14 +57525,11 @@ public final class MasterProtos { return mayInterruptIfRunning_; } - private void initFields() { - procId_ = 0L; - mayInterruptIfRunning_ = true; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasProcId()) { 
memoizedIsInitialized = 0; @@ -54382,19 +57541,17 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, procId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBool(2, mayInterruptIfRunning_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -54406,19 +57563,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(2, mayInterruptIfRunning_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -54439,12 +57590,10 @@ public final class MasterProtos { result = result && (getMayInterruptIfRunning() == other.getMayInterruptIfRunning()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -54454,13 +57603,15 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasProcId()) { hash = (37 * hash) + PROC_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getProcId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getProcId()); } if 
(hasMayInterruptIfRunning()) { hash = (37 * hash) + MAYINTERRUPTIFRUNNING_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getMayInterruptIfRunning()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getMayInterruptIfRunning()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -54488,46 +57639,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest parseFrom( 
com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -54535,14 +57697,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.AbortProcedureRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.AbortProcedureRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AbortProcedureRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AbortProcedureRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -54555,18 +57718,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder 
clear() { super.clear(); procId_ = 0L; @@ -54576,10 +57736,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AbortProcedureRequest_descriptor; @@ -54614,6 +57770,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest)other); @@ -54631,13 +57813,13 @@ public final class MasterProtos { if (other.hasMayInterruptIfRunning()) { setMayInterruptIfRunning(other.getMayInterruptIfRunning()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasProcId()) { - return false; } return true; @@ -54652,7 
+57834,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -54662,7 +57844,6 @@ public final class MasterProtos { } private int bitField0_; - // required uint64 proc_id = 1; private long procId_ ; /** * required uint64 proc_id = 1; @@ -54695,7 +57876,6 @@ public final class MasterProtos { return this; } - // optional bool mayInterruptIfRunning = 2 [default = true]; private boolean mayInterruptIfRunning_ = true; /** * optional bool mayInterruptIfRunning = 2 [default = true]; @@ -54727,22 +57907,59 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.AbortProcedureRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.AbortProcedureRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest DEFAULT_INSTANCE; static { - defaultInstance = new AbortProcedureRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new 
com.google.protobuf.AbstractParser() { + public AbortProcedureRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new AbortProcedureRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.AbortProcedureRequest) } - public interface AbortProcedureResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface AbortProcedureResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.AbortProcedureResponse) + com.google.protobuf.MessageOrBuilder { - // required bool is_procedure_aborted = 1; /** * required bool is_procedure_aborted = 1; */ @@ -54755,36 +57972,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.AbortProcedureResponse} */ - public static final class AbortProcedureResponse extends - com.google.protobuf.GeneratedMessage - implements AbortProcedureResponseOrBuilder { + public static final class AbortProcedureResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.AbortProcedureResponse) + AbortProcedureResponseOrBuilder { // Use AbortProcedureResponse.newBuilder() to construct. 
- private AbortProcedureResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private AbortProcedureResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private AbortProcedureResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final AbortProcedureResponse defaultInstance; - public static AbortProcedureResponse getDefaultInstance() { - return defaultInstance; - } - - public AbortProcedureResponse getDefaultInstanceForType() { - return defaultInstance; + private AbortProcedureResponse() { + isProcedureAborted_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private AbortProcedureResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -54814,7 +58023,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -54825,30 +58034,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AbortProcedureResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AbortProcedureResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public AbortProcedureResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new AbortProcedureResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bool is_procedure_aborted = 1; public static final int IS_PROCEDURE_ABORTED_FIELD_NUMBER = 1; private boolean isProcedureAborted_; /** @@ -54864,13 +58057,11 @@ public final class MasterProtos { return isProcedureAborted_; } - private void initFields() { - isProcedureAborted_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasIsProcedureAborted()) { memoizedIsInitialized = 0; @@ -54882,16 +58073,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, isProcedureAborted_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) 
return size; size = 0; @@ -54899,19 +58088,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, isProcedureAborted_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -54927,12 +58110,10 @@ public final class MasterProtos { result = result && (getIsProcedureAborted() == other.getIsProcedureAborted()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -54942,9 +58123,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasIsProcedureAborted()) { hash = (37 * hash) + IS_PROCEDURE_ABORTED_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getIsProcedureAborted()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getIsProcedureAborted()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -54972,46 +58154,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse 
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder 
newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -55019,14 +58212,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.AbortProcedureResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.AbortProcedureResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AbortProcedureResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AbortProcedureResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -55039,18 +58233,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); 
maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); isProcedureAborted_ = false; @@ -55058,10 +58249,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AbortProcedureResponse_descriptor; @@ -55092,6 +58279,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse)other); @@ -55106,13 +58319,13 @@ public final class MasterProtos { if 
(other.hasIsProcedureAborted()) { setIsProcedureAborted(other.getIsProcedureAborted()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasIsProcedureAborted()) { - return false; } return true; @@ -55127,7 +58340,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -55137,7 +58350,6 @@ public final class MasterProtos { } private int bitField0_; - // required bool is_procedure_aborted = 1; private boolean isProcedureAborted_ ; /** * required bool is_procedure_aborted = 1; @@ -55169,54 +58381,83 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.AbortProcedureResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.AbortProcedureResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse DEFAULT_INSTANCE; static { - defaultInstance = new AbortProcedureResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse getDefaultInstance() { + return 
DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public AbortProcedureResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new AbortProcedureResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.AbortProcedureResponse) } - public interface ListProceduresRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ListProceduresRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ListProceduresRequest) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.ListProceduresRequest} */ - public static final class ListProceduresRequest extends - com.google.protobuf.GeneratedMessage - implements ListProceduresRequestOrBuilder { + public static final class ListProceduresRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ListProceduresRequest) + ListProceduresRequestOrBuilder { // Use ListProceduresRequest.newBuilder() to construct. 
- private ListProceduresRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private ListProceduresRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ListProceduresRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ListProceduresRequest defaultInstance; - public static ListProceduresRequest getDefaultInstance() { - return defaultInstance; } - - public ListProceduresRequest getDefaultInstanceForType() { - return defaultInstance; + private ListProceduresRequest() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ListProceduresRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -55240,7 +58481,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -55251,34 +58492,18 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListProceduresRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListProceduresRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ListProceduresRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ListProceduresRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -55286,29 +58511,21 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public 
boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -55319,12 +58536,10 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -55332,7 +58547,7 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -55360,46 +58575,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -55407,14 +58633,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.ListProceduresRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ListProceduresRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListProceduresRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListProceduresRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -55427,27 +58654,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder 
clear() { super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListProceduresRequest_descriptor; @@ -55471,6 +58691,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest)other); @@ -55482,7 +58728,8 @@ public final class MasterProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -55499,7 +58746,7 @@ 
public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -55507,22 +58754,59 @@ public final class MasterProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ListProceduresRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.ListProceduresRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest DEFAULT_INSTANCE; static { - defaultInstance = new ListProceduresRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ListProceduresRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ListProceduresRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser 
getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ListProceduresRequest) } - public interface ListProceduresResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ListProceduresResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ListProceduresResponse) + com.google.protobuf.MessageOrBuilder { - // repeated .hbase.pb.Procedure procedure = 1; /** * repeated .hbase.pb.Procedure procedure = 1; */ @@ -55550,36 +58834,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.ListProceduresResponse} */ - public static final class ListProceduresResponse extends - com.google.protobuf.GeneratedMessage - implements ListProceduresResponseOrBuilder { + public static final class ListProceduresResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ListProceduresResponse) + ListProceduresResponseOrBuilder { // Use ListProceduresResponse.newBuilder() to construct. 
- private ListProceduresResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private ListProceduresResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private ListProceduresResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ListProceduresResponse defaultInstance; - public static ListProceduresResponse getDefaultInstance() { - return defaultInstance; - } - - public ListProceduresResponse getDefaultInstanceForType() { - return defaultInstance; + private ListProceduresResponse() { + procedure_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ListProceduresResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -55603,7 +58879,8 @@ public final class MasterProtos { procedure_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } - procedure_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.PARSER, extensionRegistry)); + procedure_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.PARSER, extensionRegistry)); break; } } @@ -55612,7 +58889,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { 
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { procedure_ = java.util.Collections.unmodifiableList(procedure_); @@ -55626,29 +58903,13 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListProceduresResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListProceduresResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ListProceduresResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ListProceduresResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - // repeated .hbase.pb.Procedure procedure = 1; public static final int PROCEDURE_FIELD_NUMBER = 1; private java.util.List procedure_; /** @@ -55684,13 +58945,11 @@ public final class MasterProtos { return procedure_.get(index); } - private void initFields() { - procedure_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; for (int i = 0; i < getProcedureCount(); i++) { if 
(!getProcedure(i).isInitialized()) { @@ -55704,16 +58963,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < procedure_.size(); i++) { output.writeMessage(1, procedure_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -55721,19 +58978,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, procedure_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -55746,12 +58997,10 @@ public final class MasterProtos { boolean result = true; result = result && getProcedureList() .equals(other.getProcedureList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -55763,7 +59012,7 @@ public final class MasterProtos { hash = (37 * hash) + PROCEDURE_FIELD_NUMBER; hash = (53 * hash) + getProcedureList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -55791,46 +59040,57 @@ public final class MasterProtos { } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, 
extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -55838,14 +59098,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.ListProceduresResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ListProceduresResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListProceduresResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListProceduresResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -55858,19 +59119,16 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getProcedureFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (procedureBuilder_ == null) { @@ -55882,10 +59140,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListProceduresResponse_descriptor; @@ -55919,6 +59173,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + 
Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse)other); @@ -55949,21 +59229,21 @@ public final class MasterProtos { procedure_ = other.procedure_; bitField0_ = (bitField0_ & ~0x00000001); procedureBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getProcedureFieldBuilder() : null; } else { procedureBuilder_.addAllMessages(other.procedure_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getProcedureCount(); i++) { if (!getProcedure(i).isInitialized()) { - return false; } } @@ -55979,7 +59259,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -55989,7 +59269,6 @@ public final class MasterProtos { } private int bitField0_; - // repeated .hbase.pb.Procedure procedure = 1; private java.util.List procedure_ = java.util.Collections.emptyList(); private void ensureProcedureIsMutable() { @@ -55999,7 +59278,7 @@ public final class MasterProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure, 
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder> procedureBuilder_; /** @@ -56131,7 +59410,8 @@ public final class MasterProtos { java.lang.Iterable values) { if (procedureBuilder_ == null) { ensureProcedureIsMutable(); - super.addAll(values, procedure_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, procedure_); onChanged(); } else { procedureBuilder_.addAllMessages(values); @@ -56214,11 +59494,11 @@ public final class MasterProtos { getProcedureBuilderList() { return getProcedureFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder> getProcedureFieldBuilder() { if (procedureBuilder_ == null) { - procedureBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + procedureBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>( procedure_, ((bitField0_ & 0x00000001) == 0x00000001), @@ -56228,22 +59508,59 @@ public final class MasterProtos { } return procedureBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // 
@@protoc_insertion_point(builder_scope:hbase.pb.ListProceduresResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.ListProceduresResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse DEFAULT_INSTANCE; static { - defaultInstance = new ListProceduresResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ListProceduresResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ListProceduresResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ListProceduresResponse) } - public interface SetQuotaRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface SetQuotaRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SetQuotaRequest) + com.google.protobuf.MessageOrBuilder { - // optional string user_name = 1; /** * optional string user_name = 1; */ @@ -56258,7 +59575,6 @@ public final class MasterProtos { com.google.protobuf.ByteString getUserNameBytes(); - // optional string user_group = 2; /** * optional string user_group = 2; 
*/ @@ -56273,7 +59589,6 @@ public final class MasterProtos { com.google.protobuf.ByteString getUserGroupBytes(); - // optional string namespace = 3; /** * optional string namespace = 3; */ @@ -56288,7 +59603,6 @@ public final class MasterProtos { com.google.protobuf.ByteString getNamespaceBytes(); - // optional .hbase.pb.TableName table_name = 4; /** * optional .hbase.pb.TableName table_name = 4; */ @@ -56302,7 +59616,6 @@ public final class MasterProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); - // optional bool remove_all = 5; /** * optional bool remove_all = 5; */ @@ -56312,7 +59625,6 @@ public final class MasterProtos { */ boolean getRemoveAll(); - // optional bool bypass_globals = 6; /** * optional bool bypass_globals = 6; */ @@ -56322,7 +59634,6 @@ public final class MasterProtos { */ boolean getBypassGlobals(); - // optional .hbase.pb.ThrottleRequest throttle = 7; /** * optional .hbase.pb.ThrottleRequest throttle = 7; */ @@ -56339,36 +59650,32 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.SetQuotaRequest} */ - public static final class SetQuotaRequest extends - com.google.protobuf.GeneratedMessage - implements SetQuotaRequestOrBuilder { + public static final class SetQuotaRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.SetQuotaRequest) + SetQuotaRequestOrBuilder { // Use SetQuotaRequest.newBuilder() to construct. 
- private SetQuotaRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private SetQuotaRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private SetQuotaRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SetQuotaRequest defaultInstance; - public static SetQuotaRequest getDefaultInstance() { - return defaultInstance; } - - public SetQuotaRequest getDefaultInstanceForType() { - return defaultInstance; + private SetQuotaRequest() { + userName_ = ""; + userGroup_ = ""; + namespace_ = ""; + removeAll_ = false; + bypassGlobals_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private SetQuotaRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -56388,18 +59695,21 @@ public final class MasterProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - userName_ = input.readBytes(); + userName_ = bs; break; } case 18: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; - userGroup_ = input.readBytes(); + userGroup_ = bs; break; } case 26: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000004; - namespace_ = input.readBytes(); + namespace_ = bs; break; } case 34: { @@ -56444,7 +59754,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -56455,32 +59765,16 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetQuotaRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetQuotaRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SetQuotaRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SetQuotaRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional string user_name = 1; public static final int USER_NAME_FIELD_NUMBER = 1; - private java.lang.Object userName_; + private volatile java.lang.Object userName_; /** * optional string user_name = 1; */ @@ -56521,9 +59815,8 @@ public final class MasterProtos { } } - // optional string user_group = 2; public static final int USER_GROUP_FIELD_NUMBER = 2; - private java.lang.Object userGroup_; + private volatile java.lang.Object userGroup_; /** * optional string user_group = 2; */ @@ -56564,9 +59857,8 @@ public final class MasterProtos { } } - // optional string 
namespace = 3; public static final int NAMESPACE_FIELD_NUMBER = 3; - private java.lang.Object namespace_; + private volatile java.lang.Object namespace_; /** * optional string namespace = 3; */ @@ -56607,7 +59899,6 @@ public final class MasterProtos { } } - // optional .hbase.pb.TableName table_name = 4; public static final int TABLE_NAME_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** @@ -56620,16 +59911,15 @@ public final class MasterProtos { * optional .hbase.pb.TableName table_name = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * optional .hbase.pb.TableName table_name = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { - return tableName_; + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } - // optional bool remove_all = 5; public static final int REMOVE_ALL_FIELD_NUMBER = 5; private boolean removeAll_; /** @@ -56645,7 +59935,6 @@ public final class MasterProtos { return removeAll_; } - // optional bool bypass_globals = 6; public static final int BYPASS_GLOBALS_FIELD_NUMBER = 6; private boolean bypassGlobals_; /** @@ -56661,7 +59950,6 @@ public final class MasterProtos { return bypassGlobals_; } - // optional .hbase.pb.ThrottleRequest throttle = 7; public static final int THROTTLE_FIELD_NUMBER = 7; private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest throttle_; /** @@ -56674,28 +59962,20 @@ public final class MasterProtos { * optional .hbase.pb.ThrottleRequest throttle = 7; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest getThrottle() { - return throttle_; + return throttle_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest.getDefaultInstance() : throttle_; } /** * optional .hbase.pb.ThrottleRequest throttle = 7; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequestOrBuilder getThrottleOrBuilder() { - return throttle_; + return throttle_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest.getDefaultInstance() : throttle_; } - private void initFields() { - userName_ = ""; - userGroup_ = ""; - namespace_ = ""; - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - removeAll_ = false; - bypassGlobals_ = false; - throttle_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (hasTableName()) { if (!getTableName().isInitialized()) { @@ -56715,18 +59995,17 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getUserNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, userName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getUserGroupBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, userGroup_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, getNamespaceBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, namespace_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeMessage(4, tableName_); + output.writeMessage(4, getTableName()); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBool(5, removeAll_); @@ -56735,32 +60014,28 @@ public final class MasterProtos { output.writeBool(6, bypassGlobals_); } if (((bitField0_ & 0x00000040) == 0x00000040)) { - output.writeMessage(7, throttle_); + output.writeMessage(7, getThrottle()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); 
} - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getUserNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, userName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getUserGroupBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, userGroup_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, getNamespaceBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, namespace_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(4, tableName_); + .computeMessageSize(4, getTableName()); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream @@ -56772,21 +60047,15 @@ public final class MasterProtos { } if (((bitField0_ & 0x00000040) == 0x00000040)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(7, throttle_); + .computeMessageSize(7, getThrottle()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -56832,12 +60101,10 @@ public final class MasterProtos { result = result && getThrottle() .equals(other.getThrottle()); } - result = result && - 
getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -56863,17 +60130,19 @@ public final class MasterProtos { } if (hasRemoveAll()) { hash = (37 * hash) + REMOVE_ALL_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getRemoveAll()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getRemoveAll()); } if (hasBypassGlobals()) { hash = (37 * hash) + BYPASS_GLOBALS_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getBypassGlobals()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getBypassGlobals()); } if (hasThrottle()) { hash = (37 * hash) + THROTTLE_FIELD_NUMBER; hash = (53 * hash) + getThrottle().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -56901,46 +60170,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + 
.parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -56948,14 +60228,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.SetQuotaRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SetQuotaRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetQuotaRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetQuotaRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -56968,20 +60249,17 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTableNameFieldBuilder(); getThrottleFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - 
public Builder clear() { super.clear(); userName_ = ""; @@ -56991,7 +60269,7 @@ public final class MasterProtos { namespace_ = ""; bitField0_ = (bitField0_ & ~0x00000004); if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; } else { tableNameBuilder_.clear(); } @@ -57001,7 +60279,7 @@ public final class MasterProtos { bypassGlobals_ = false; bitField0_ = (bitField0_ & ~0x00000020); if (throttleBuilder_ == null) { - throttle_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest.getDefaultInstance(); + throttle_ = null; } else { throttleBuilder_.clear(); } @@ -57009,10 +60287,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetQuotaRequest_descriptor; @@ -57075,6 +60349,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder 
mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest)other); @@ -57113,20 +60413,19 @@ public final class MasterProtos { if (other.hasThrottle()) { mergeThrottle(other.getThrottle()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (hasTableName()) { if (!getTableName().isInitialized()) { - return false; } } if (hasThrottle()) { if (!getThrottle().isInitialized()) { - return false; } } @@ -57142,7 +60441,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -57152,7 +60451,6 @@ public final class MasterProtos { } private int bitField0_; - // optional string user_name = 1; private java.lang.Object userName_ = ""; /** * optional string user_name = 1; @@ -57166,9 +60464,12 @@ public final class MasterProtos { public java.lang.String getUserName() { java.lang.Object ref = userName_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - userName_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + userName_ = s; + } return s; } else { return (java.lang.String) ref; @@ -57226,7 +60527,6 @@ public final class MasterProtos { return this; } - // optional string user_group = 2; private java.lang.Object userGroup_ = ""; /** * optional string 
user_group = 2; @@ -57240,9 +60540,12 @@ public final class MasterProtos { public java.lang.String getUserGroup() { java.lang.Object ref = userGroup_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - userGroup_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + userGroup_ = s; + } return s; } else { return (java.lang.String) ref; @@ -57300,7 +60603,6 @@ public final class MasterProtos { return this; } - // optional string namespace = 3; private java.lang.Object namespace_ = ""; /** * optional string namespace = 3; @@ -57314,9 +60616,12 @@ public final class MasterProtos { public java.lang.String getNamespace() { java.lang.Object ref = namespace_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - namespace_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + namespace_ = s; + } return s; } else { return (java.lang.String) ref; @@ -57374,9 +60679,8 @@ public final class MasterProtos { return this; } - // optional .hbase.pb.TableName table_name = 4; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * optional .hbase.pb.TableName 
table_name = 4; @@ -57389,7 +60693,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } @@ -57430,6 +60734,7 @@ public final class MasterProtos { public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000008) == 0x00000008) && + tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); @@ -57448,7 +60753,7 @@ public final class MasterProtos { */ public Builder clearTableName() { if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); @@ -57471,19 +60776,20 @@ public final class MasterProtos { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { - return tableName_; + return tableName_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * optional .hbase.pb.TableName table_name = 4; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { - tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( - tableName_, + getTableName(), getParentForChildren(), isClean()); tableName_ = null; @@ -57491,7 +60797,6 @@ public final class MasterProtos { return tableNameBuilder_; } - // optional bool remove_all = 5; private boolean removeAll_ ; /** * optional bool remove_all = 5; @@ -57524,7 +60829,6 @@ public final class MasterProtos { return this; } - // optional bool bypass_globals = 6; private boolean bypassGlobals_ ; /** * optional bool bypass_globals = 6; @@ -57557,9 +60861,8 @@ public final class MasterProtos { return this; } - // optional .hbase.pb.ThrottleRequest throttle = 7; - private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest throttle_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest throttle_ = null; + private com.google.protobuf.SingleFieldBuilderV3< 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequestOrBuilder> throttleBuilder_; /** * optional .hbase.pb.ThrottleRequest throttle = 7; @@ -57572,7 +60875,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest getThrottle() { if (throttleBuilder_ == null) { - return throttle_; + return throttle_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest.getDefaultInstance() : throttle_; } else { return throttleBuilder_.getMessage(); } @@ -57613,6 +60916,7 @@ public final class MasterProtos { public Builder mergeThrottle(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest value) { if (throttleBuilder_ == null) { if (((bitField0_ & 0x00000040) == 0x00000040) && + throttle_ != null && throttle_ != org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest.getDefaultInstance()) { throttle_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest.newBuilder(throttle_).mergeFrom(value).buildPartial(); @@ -57631,7 +60935,7 @@ public final class MasterProtos { */ public Builder clearThrottle() { if (throttleBuilder_ == null) { - throttle_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest.getDefaultInstance(); + throttle_ = null; onChanged(); } else { throttleBuilder_.clear(); @@ -57654,73 +60958,103 @@ public final class MasterProtos { if (throttleBuilder_ != null) { return throttleBuilder_.getMessageOrBuilder(); } else { - return throttle_; + return throttle_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest.getDefaultInstance() : throttle_; } } /** * optional .hbase.pb.ThrottleRequest throttle = 7; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequestOrBuilder> getThrottleFieldBuilder() { if (throttleBuilder_ == null) { - throttleBuilder_ = new com.google.protobuf.SingleFieldBuilder< + throttleBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequestOrBuilder>( - throttle_, + getThrottle(), getParentForChildren(), isClean()); throttle_ = null; } return throttleBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.SetQuotaRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.SetQuotaRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest DEFAULT_INSTANCE; static { - defaultInstance = new SetQuotaRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest getDefaultInstance() { + return 
DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public SetQuotaRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SetQuotaRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.SetQuotaRequest) } - public interface SetQuotaResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface SetQuotaResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SetQuotaResponse) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.SetQuotaResponse} */ - public static final class SetQuotaResponse extends - com.google.protobuf.GeneratedMessage - implements SetQuotaResponseOrBuilder { + public static final class SetQuotaResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.SetQuotaResponse) + SetQuotaResponseOrBuilder { // Use SetQuotaResponse.newBuilder() to construct. 
- private SetQuotaResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private SetQuotaResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private SetQuotaResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SetQuotaResponse defaultInstance; - public static SetQuotaResponse getDefaultInstance() { - return defaultInstance; } - - public SetQuotaResponse getDefaultInstanceForType() { - return defaultInstance; + private SetQuotaResponse() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private SetQuotaResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -57744,7 +61078,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -57755,34 +61089,18 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetQuotaResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetQuotaResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SetQuotaResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SetQuotaResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -57790,29 +61108,21 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final 
java.lang.Object obj) { if (obj == this) { return true; @@ -57823,12 +61133,10 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -57836,7 +61144,7 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -57864,46 +61172,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse 
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -57911,14 +61230,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.SetQuotaResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SetQuotaResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetQuotaResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetQuotaResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -57931,27 +61251,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); return 
this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetQuotaResponse_descriptor; @@ -57975,6 +61288,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse)other); @@ -57986,7 +61325,8 @@ public final class MasterProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -58003,7 +61343,7 @@ public final class MasterProtos { parsedMessage = 
PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -58011,22 +61351,59 @@ public final class MasterProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.SetQuotaResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.SetQuotaResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse DEFAULT_INSTANCE; static { - defaultInstance = new SetQuotaResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public SetQuotaResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SetQuotaResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.SetQuotaResponse) } - public interface MajorCompactionTimestampRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface MajorCompactionTimestampRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.MajorCompactionTimestampRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.TableName table_name = 1; /** * required .hbase.pb.TableName table_name = 1; */ @@ -58043,36 +61420,27 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.MajorCompactionTimestampRequest} */ - public static final class MajorCompactionTimestampRequest extends - com.google.protobuf.GeneratedMessage - implements MajorCompactionTimestampRequestOrBuilder { + public static final class MajorCompactionTimestampRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.MajorCompactionTimestampRequest) + MajorCompactionTimestampRequestOrBuilder { // Use MajorCompactionTimestampRequest.newBuilder() to construct. 
- private MajorCompactionTimestampRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private MajorCompactionTimestampRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private MajorCompactionTimestampRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final MajorCompactionTimestampRequest defaultInstance; - public static MajorCompactionTimestampRequest getDefaultInstance() { - return defaultInstance; } - - public MajorCompactionTimestampRequest getDefaultInstanceForType() { - return defaultInstance; + private MajorCompactionTimestampRequest() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private MajorCompactionTimestampRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -58110,7 +61478,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -58121,30 +61489,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_MajorCompactionTimestampRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_MajorCompactionTimestampRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public MajorCompactionTimestampRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new MajorCompactionTimestampRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.TableName table_name = 1; public static final int TABLE_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** @@ -58157,22 +61509,20 @@ public final class MasterProtos { * required .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * required .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { - return tableName_; + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } - private void initFields() { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasTableName()) { memoizedIsInitialized = 0; @@ -58188,36 +61538,28 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, tableName_); + output.writeMessage(1, getTableName()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, tableName_); + .computeMessageSize(1, getTableName()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -58233,12 +61575,10 @@ public final class MasterProtos { result = result && getTableName() .equals(other.getTableName()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && 
unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -58250,7 +61590,7 @@ public final class MasterProtos { hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -58278,46 +61618,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -58325,14 +61676,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.MajorCompactionTimestampRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.MajorCompactionTimestampRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_MajorCompactionTimestampRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_MajorCompactionTimestampRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -58345,23 +61697,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTableNameFieldBuilder(); } 
} - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; } else { tableNameBuilder_.clear(); } @@ -58369,10 +61718,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_MajorCompactionTimestampRequest_descriptor; @@ -58407,6 +61752,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest)other); @@ -58421,17 +61792,16 @@ public final class MasterProtos { if (other.hasTableName()) { 
mergeTableName(other.getTableName()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasTableName()) { - return false; } if (!getTableName().isInitialized()) { - return false; } return true; @@ -58446,7 +61816,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -58456,9 +61826,8 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.TableName table_name = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * required .hbase.pb.TableName table_name = 1; @@ -58471,7 +61840,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { - return tableName_; + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } @@ -58512,6 +61881,7 @@ public final class MasterProtos { public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); @@ -58530,7 +61900,7 @@ public final class MasterProtos { */ public Builder clearTableName() { if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); @@ -58553,41 +61923,79 @@ public final class MasterProtos { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { - return tableName_; + return tableName_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * required .hbase.pb.TableName table_name = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { - tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( - tableName_, + getTableName(), getParentForChildren(), isClean()); tableName_ = null; } return tableNameBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.MajorCompactionTimestampRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.MajorCompactionTimestampRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest DEFAULT_INSTANCE; static { - defaultInstance = new MajorCompactionTimestampRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest(); + } + + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public MajorCompactionTimestampRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MajorCompactionTimestampRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.MajorCompactionTimestampRequest) } - public interface MajorCompactionTimestampForRegionRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface MajorCompactionTimestampForRegionRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.MajorCompactionTimestampForRegionRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionSpecifier region = 1; /** * required .hbase.pb.RegionSpecifier region = 1; */ @@ -58604,36 +62012,27 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.MajorCompactionTimestampForRegionRequest} */ - public static final class MajorCompactionTimestampForRegionRequest extends - com.google.protobuf.GeneratedMessage - implements MajorCompactionTimestampForRegionRequestOrBuilder { + public static final class MajorCompactionTimestampForRegionRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // 
@@protoc_insertion_point(message_implements:hbase.pb.MajorCompactionTimestampForRegionRequest) + MajorCompactionTimestampForRegionRequestOrBuilder { // Use MajorCompactionTimestampForRegionRequest.newBuilder() to construct. - private MajorCompactionTimestampForRegionRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private MajorCompactionTimestampForRegionRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private MajorCompactionTimestampForRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final MajorCompactionTimestampForRegionRequest defaultInstance; - public static MajorCompactionTimestampForRegionRequest getDefaultInstance() { - return defaultInstance; } - - public MajorCompactionTimestampForRegionRequest getDefaultInstanceForType() { - return defaultInstance; + private MajorCompactionTimestampForRegionRequest() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private MajorCompactionTimestampForRegionRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -58671,7 +62070,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -58682,30 +62081,14 @@ public final 
class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_MajorCompactionTimestampForRegionRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_MajorCompactionTimestampForRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public MajorCompactionTimestampForRegionRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new MajorCompactionTimestampForRegionRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** @@ -58718,22 +62101,20 @@ public final class MasterProtos { * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } /** * required .hbase.pb.RegionSpecifier region = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; + return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } - private void initFields() { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRegion()) { memoizedIsInitialized = 0; @@ -58749,36 +62130,28 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); + output.writeMessage(1, getRegion()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); + .computeMessageSize(1, getRegion()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override 
public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -58794,12 +62167,10 @@ public final class MasterProtos { result = result && getRegion() .equals(other.getRegion()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -58811,7 +62182,7 @@ public final class MasterProtos { hash = (37 * hash) + REGION_FIELD_NUMBER; hash = (53 * hash) + getRegion().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -58839,46 +62210,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest 
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -58886,14 +62268,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.MajorCompactionTimestampForRegionRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.MajorCompactionTimestampForRegionRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_MajorCompactionTimestampForRegionRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_MajorCompactionTimestampForRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -58906,23 +62289,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + 
.alwaysUseFieldBuilders) { getRegionFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; } else { regionBuilder_.clear(); } @@ -58930,10 +62310,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_MajorCompactionTimestampForRegionRequest_descriptor; @@ -58968,6 +62344,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest)other); @@ -58982,17 
+62384,16 @@ public final class MasterProtos { if (other.hasRegion()) { mergeRegion(other.getRegion()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasRegion()) { - return false; } if (!getRegion().isInitialized()) { - return false; } return true; @@ -59007,7 +62408,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -59017,9 +62418,8 @@ public final class MasterProtos { } private int bitField0_; - // required .hbase.pb.RegionSpecifier region = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * required .hbase.pb.RegionSpecifier region = 1; @@ -59032,7 +62432,7 @@ public final class MasterProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { - return region_; + return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } @@ -59073,6 +62473,7 @@ public final class MasterProtos { public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != null && region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); @@ -59091,7 +62492,7 @@ public final class MasterProtos { */ public Builder clearRegion() { if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + region_ = null; onChanged(); } else { regionBuilder_.clear(); @@ -59114,41 +62515,79 @@ public final class MasterProtos { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { - return region_; + return region_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } } /** * required .hbase.pb.RegionSpecifier region = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, + getRegion(), getParentForChildren(), isClean()); region_ = null; } return regionBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.MajorCompactionTimestampForRegionRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.MajorCompactionTimestampForRegionRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest DEFAULT_INSTANCE; static { - defaultInstance = new MajorCompactionTimestampForRegionRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest(); + } + + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public MajorCompactionTimestampForRegionRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MajorCompactionTimestampForRegionRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.MajorCompactionTimestampForRegionRequest) } - public interface MajorCompactionTimestampResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface MajorCompactionTimestampResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.MajorCompactionTimestampResponse) + com.google.protobuf.MessageOrBuilder { - // required int64 compaction_timestamp = 1; /** * required int64 compaction_timestamp = 1; */ @@ -59161,36 +62600,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.MajorCompactionTimestampResponse} */ - public static final class MajorCompactionTimestampResponse extends - com.google.protobuf.GeneratedMessage - implements MajorCompactionTimestampResponseOrBuilder { + public static final class MajorCompactionTimestampResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.MajorCompactionTimestampResponse) + 
MajorCompactionTimestampResponseOrBuilder { // Use MajorCompactionTimestampResponse.newBuilder() to construct. - private MajorCompactionTimestampResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private MajorCompactionTimestampResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private MajorCompactionTimestampResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final MajorCompactionTimestampResponse defaultInstance; - public static MajorCompactionTimestampResponse getDefaultInstance() { - return defaultInstance; } - - public MajorCompactionTimestampResponse getDefaultInstanceForType() { - return defaultInstance; + private MajorCompactionTimestampResponse() { + compactionTimestamp_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private MajorCompactionTimestampResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -59220,7 +62651,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -59231,30 +62662,14 @@ public final class MasterProtos { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_MajorCompactionTimestampResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_MajorCompactionTimestampResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public MajorCompactionTimestampResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new MajorCompactionTimestampResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required int64 compaction_timestamp = 1; public static final int COMPACTION_TIMESTAMP_FIELD_NUMBER = 1; private long compactionTimestamp_; /** @@ -59270,13 +62685,11 @@ public final class MasterProtos { return compactionTimestamp_; } - private void initFields() { - compactionTimestamp_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasCompactionTimestamp()) { memoizedIsInitialized = 0; @@ -59288,16 +62701,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws 
java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeInt64(1, compactionTimestamp_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -59305,19 +62716,13 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeInt64Size(1, compactionTimestamp_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -59333,12 +62738,10 @@ public final class MasterProtos { result = result && (getCompactionTimestamp() == other.getCompactionTimestamp()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -59348,9 +62751,10 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasCompactionTimestamp()) { hash = (37 * hash) + COMPACTION_TIMESTAMP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getCompactionTimestamp()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getCompactionTimestamp()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -59378,46 +62782,57 @@ public final class MasterProtos { } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -59425,14 +62840,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.MajorCompactionTimestampResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.MajorCompactionTimestampResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_MajorCompactionTimestampResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_MajorCompactionTimestampResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -59445,18 +62861,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); compactionTimestamp_ = 0L; @@ -59464,10 +62877,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_MajorCompactionTimestampResponse_descriptor; @@ -59498,6 +62907,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public 
Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse)other); @@ -59512,13 +62947,13 @@ public final class MasterProtos { if (other.hasCompactionTimestamp()) { setCompactionTimestamp(other.getCompactionTimestamp()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasCompactionTimestamp()) { - return false; } return true; @@ -59533,7 +62968,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -59543,7 +62978,6 @@ public final class MasterProtos { } private int bitField0_; - // required int64 compaction_timestamp = 1; private long compactionTimestamp_ ; /** * required int64 compaction_timestamp = 1; @@ -59575,54 +63009,83 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.MajorCompactionTimestampResponse) } 
+ // @@protoc_insertion_point(class_scope:hbase.pb.MajorCompactionTimestampResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse DEFAULT_INSTANCE; static { - defaultInstance = new MajorCompactionTimestampResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public MajorCompactionTimestampResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MajorCompactionTimestampResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.MajorCompactionTimestampResponse) } - public interface SecurityCapabilitiesRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface SecurityCapabilitiesRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SecurityCapabilitiesRequest) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.SecurityCapabilitiesRequest} */ - public static final class SecurityCapabilitiesRequest extends - com.google.protobuf.GeneratedMessage - implements 
SecurityCapabilitiesRequestOrBuilder { + public static final class SecurityCapabilitiesRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.SecurityCapabilitiesRequest) + SecurityCapabilitiesRequestOrBuilder { // Use SecurityCapabilitiesRequest.newBuilder() to construct. - private SecurityCapabilitiesRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private SecurityCapabilitiesRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private SecurityCapabilitiesRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SecurityCapabilitiesRequest defaultInstance; - public static SecurityCapabilitiesRequest getDefaultInstance() { - return defaultInstance; - } - - public SecurityCapabilitiesRequest getDefaultInstanceForType() { - return defaultInstance; + private SecurityCapabilitiesRequest() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private SecurityCapabilitiesRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -59646,7 +63109,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -59657,34 +63120,18 @@ public final class 
MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SecurityCapabilitiesRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SecurityCapabilitiesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SecurityCapabilitiesRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SecurityCapabilitiesRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -59692,29 +63139,21 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += 
getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -59725,12 +63164,10 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -59738,7 +63175,7 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -59766,46 +63203,57 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, 
input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == 
DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -59813,14 +63261,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.SecurityCapabilitiesRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SecurityCapabilitiesRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SecurityCapabilitiesRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SecurityCapabilitiesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -59833,27 +63282,20 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder 
create() { - return new Builder(); - } - public Builder clear() { super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SecurityCapabilitiesRequest_descriptor; @@ -59877,6 +63319,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest)other); @@ -59888,7 +63356,8 @@ public final class MasterProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + 
this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -59905,7 +63374,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -59913,22 +63382,59 @@ public final class MasterProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.SecurityCapabilitiesRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.SecurityCapabilitiesRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest DEFAULT_INSTANCE; static { - defaultInstance = new SecurityCapabilitiesRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public SecurityCapabilitiesRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SecurityCapabilitiesRequest(input, 
extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.SecurityCapabilitiesRequest) } - public interface SecurityCapabilitiesResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface SecurityCapabilitiesResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SecurityCapabilitiesResponse) + com.google.protobuf.MessageOrBuilder { - // repeated .hbase.pb.SecurityCapabilitiesResponse.Capability capabilities = 1; /** * repeated .hbase.pb.SecurityCapabilitiesResponse.Capability capabilities = 1; */ @@ -59945,36 +63451,28 @@ public final class MasterProtos { /** * Protobuf type {@code hbase.pb.SecurityCapabilitiesResponse} */ - public static final class SecurityCapabilitiesResponse extends - com.google.protobuf.GeneratedMessage - implements SecurityCapabilitiesResponseOrBuilder { + public static final class SecurityCapabilitiesResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.SecurityCapabilitiesResponse) + SecurityCapabilitiesResponseOrBuilder { // Use SecurityCapabilitiesResponse.newBuilder() to construct. 
- private SecurityCapabilitiesResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private SecurityCapabilitiesResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private SecurityCapabilitiesResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SecurityCapabilitiesResponse defaultInstance; - public static SecurityCapabilitiesResponse getDefaultInstance() { - return defaultInstance; } - - public SecurityCapabilitiesResponse getDefaultInstanceForType() { - return defaultInstance; + private SecurityCapabilitiesResponse() { + capabilities_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private SecurityCapabilitiesResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -60000,10 +63498,10 @@ public final class MasterProtos { unknownFields.mergeVarintField(1, rawValue); } else { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - capabilities_ = new java.util.ArrayList(); + capabilities_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } - capabilities_.add(value); + capabilities_.add(rawValue); } break; } @@ -60017,10 +63515,10 @@ public final class MasterProtos { unknownFields.mergeVarintField(1, rawValue); } else { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - capabilities_ = new java.util.ArrayList(); + capabilities_ = new java.util.ArrayList(); mutable_bitField0_ |= 
0x00000001; } - capabilities_.add(value); + capabilities_.add(rawValue); } } input.popLimit(oldLimit); @@ -60032,7 +63530,7 @@ public final class MasterProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { capabilities_ = java.util.Collections.unmodifiableList(capabilities_); @@ -60046,28 +63544,13 @@ public final class MasterProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SecurityCapabilitiesResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SecurityCapabilitiesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SecurityCapabilitiesResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SecurityCapabilitiesResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - /** * Protobuf enum {@code hbase.pb.SecurityCapabilitiesResponse.Capability} */ @@ -60076,23 +63559,23 @@ public final class MasterProtos { /** * SIMPLE_AUTHENTICATION = 0; */ - SIMPLE_AUTHENTICATION(0, 0), + 
SIMPLE_AUTHENTICATION(0), /** * SECURE_AUTHENTICATION = 1; */ - SECURE_AUTHENTICATION(1, 1), + SECURE_AUTHENTICATION(1), /** * AUTHORIZATION = 2; */ - AUTHORIZATION(2, 2), + AUTHORIZATION(2), /** * CELL_AUTHORIZATION = 3; */ - CELL_AUTHORIZATION(3, 3), + CELL_AUTHORIZATION(3), /** * CELL_VISIBILITY = 4; */ - CELL_VISIBILITY(4, 4), + CELL_VISIBILITY(4), ; /** @@ -60117,9 +63600,19 @@ public final class MasterProtos { public static final int CELL_VISIBILITY_VALUE = 4; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static Capability valueOf(int value) { + return forNumber(value); + } + + public static Capability forNumber(int value) { switch (value) { case 0: return SIMPLE_AUTHENTICATION; case 1: return SECURE_AUTHENTICATION; @@ -60134,17 +63627,17 @@ public final class MasterProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + Capability> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public Capability findValueByNumber(int number) { - return Capability.valueOf(number); + return Capability.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -60166,25 +63659,32 @@ public final class MasterProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private Capability(int index, int value) { - this.index = index; + private Capability(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.SecurityCapabilitiesResponse.Capability) } - // 
repeated .hbase.pb.SecurityCapabilitiesResponse.Capability capabilities = 1; public static final int CAPABILITIES_FIELD_NUMBER = 1; - private java.util.List capabilities_; + private java.util.List capabilities_; + private static final com.google.protobuf.Internal.ListAdapter.Converter< + java.lang.Integer, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability> capabilities_converter_ = + new com.google.protobuf.Internal.ListAdapter.Converter< + java.lang.Integer, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability>() { + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability convert(java.lang.Integer from) { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability result = org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability.valueOf(from); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability.SIMPLE_AUTHENTICATION : result; + } + }; /** * repeated .hbase.pb.SecurityCapabilitiesResponse.Capability capabilities = 1; */ public java.util.List getCapabilitiesList() { - return capabilities_; + return new com.google.protobuf.Internal.ListAdapter< + java.lang.Integer, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability>(capabilities_, capabilities_converter_); } /** * repeated .hbase.pb.SecurityCapabilitiesResponse.Capability capabilities = 1; @@ -60196,16 +63696,14 @@ public final class MasterProtos { * repeated .hbase.pb.SecurityCapabilitiesResponse.Capability capabilities = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability getCapabilities(int index) { - return capabilities_.get(index); + return capabilities_converter_.convert(capabilities_.get(index)); } - private void initFields() { - capabilities_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -60213,16 +63711,14 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < capabilities_.size(); i++) { - output.writeEnum(1, capabilities_.get(i).getNumber()); + output.writeEnum(1, capabilities_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -60230,24 +63726,18 @@ public final 
class MasterProtos { int dataSize = 0; for (int i = 0; i < capabilities_.size(); i++) { dataSize += com.google.protobuf.CodedOutputStream - .computeEnumSizeNoTag(capabilities_.get(i).getNumber()); + .computeEnumSizeNoTag(capabilities_.get(i)); } size += dataSize; size += 1 * capabilities_.size(); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -60258,14 +63748,11 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse) obj; boolean result = true; - result = result && getCapabilitiesList() - .equals(other.getCapabilitiesList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && capabilities_.equals(other.capabilities_); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -60275,9 +63762,9 @@ public final class MasterProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (getCapabilitiesCount() > 0) { hash = (37 * hash) + CAPABILITIES_FIELD_NUMBER; - hash = (53 * hash) + hashEnumList(getCapabilitiesList()); + hash = (53 * hash) + capabilities_.hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -60305,46 +63792,57 @@ public final class MasterProtos { } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return 
PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -60352,14 +63850,15 @@ public final class MasterProtos { * Protobuf type {@code hbase.pb.SecurityCapabilitiesResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SecurityCapabilitiesResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SecurityCapabilitiesResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { 
return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SecurityCapabilitiesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -60372,18 +63871,15 @@ public final class MasterProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); capabilities_ = java.util.Collections.emptyList(); @@ -60391,10 +63887,6 @@ public final class MasterProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SecurityCapabilitiesResponse_descriptor; @@ -60424,6 +63916,32 @@ public final class MasterProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor 
field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse)other); @@ -60445,7 +63963,8 @@ public final class MasterProtos { } onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -60462,7 +63981,7 @@ public final class MasterProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -60472,12 +63991,11 @@ public final class MasterProtos { } private int bitField0_; - // repeated .hbase.pb.SecurityCapabilitiesResponse.Capability capabilities = 1; - private java.util.List capabilities_ = + private java.util.List capabilities_ = java.util.Collections.emptyList(); private void ensureCapabilitiesIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { - capabilities_ = new java.util.ArrayList(capabilities_); + capabilities_ = new java.util.ArrayList(capabilities_); bitField0_ |= 0x00000001; } } @@ -60485,7 +64003,8 @@ public final class MasterProtos { * repeated .hbase.pb.SecurityCapabilitiesResponse.Capability capabilities = 1; */ public java.util.List getCapabilitiesList() { - return java.util.Collections.unmodifiableList(capabilities_); + return new com.google.protobuf.Internal.ListAdapter< + java.lang.Integer, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability>(capabilities_, 
capabilities_converter_); } /** * repeated .hbase.pb.SecurityCapabilitiesResponse.Capability capabilities = 1; @@ -60497,7 +64016,7 @@ public final class MasterProtos { * repeated .hbase.pb.SecurityCapabilitiesResponse.Capability capabilities = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability getCapabilities(int index) { - return capabilities_.get(index); + return capabilities_converter_.convert(capabilities_.get(index)); } /** * repeated .hbase.pb.SecurityCapabilitiesResponse.Capability capabilities = 1; @@ -60508,7 +64027,7 @@ public final class MasterProtos { throw new NullPointerException(); } ensureCapabilitiesIsMutable(); - capabilities_.set(index, value); + capabilities_.set(index, value.getNumber()); onChanged(); return this; } @@ -60520,7 +64039,7 @@ public final class MasterProtos { throw new NullPointerException(); } ensureCapabilitiesIsMutable(); - capabilities_.add(value); + capabilities_.add(value.getNumber()); onChanged(); return this; } @@ -60530,7 +64049,9 @@ public final class MasterProtos { public Builder addAllCapabilities( java.lang.Iterable values) { ensureCapabilitiesIsMutable(); - super.addAll(values, capabilities_); + for (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability value : values) { + capabilities_.add(value.getNumber()); + } onChanged(); return this; } @@ -60543,16 +64064,53 @@ public final class MasterProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.SecurityCapabilitiesResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.SecurityCapabilitiesResponse) + 
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse DEFAULT_INSTANCE; static { - defaultInstance = new SecurityCapabilitiesResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public SecurityCapabilitiesResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SecurityCapabilitiesResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.SecurityCapabilitiesResponse) } /** @@ -60564,11 +64122,11 @@ public final class MasterProtos { public interface Interface { /** - * rpc GetSchemaAlterStatus(.hbase.pb.GetSchemaAlterStatusRequest) returns (.hbase.pb.GetSchemaAlterStatusResponse); - * *
        ** Used by the client to get the number of regions that have received the updated schema 
        * 
+ * + * rpc GetSchemaAlterStatus(.hbase.pb.GetSchemaAlterStatusRequest) returns (.hbase.pb.GetSchemaAlterStatusResponse); */ public abstract void getSchemaAlterStatus( com.google.protobuf.RpcController controller, @@ -60576,11 +64134,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc GetTableDescriptors(.hbase.pb.GetTableDescriptorsRequest) returns (.hbase.pb.GetTableDescriptorsResponse); - * *
        ** Get list of TableDescriptors for requested tables. 
        * 
+ * + * rpc GetTableDescriptors(.hbase.pb.GetTableDescriptorsRequest) returns (.hbase.pb.GetTableDescriptorsResponse); */ public abstract void getTableDescriptors( com.google.protobuf.RpcController controller, @@ -60588,11 +64146,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc GetTableNames(.hbase.pb.GetTableNamesRequest) returns (.hbase.pb.GetTableNamesResponse); - * *
        ** Get the list of table names. 
        * 
+ * + * rpc GetTableNames(.hbase.pb.GetTableNamesRequest) returns (.hbase.pb.GetTableNamesResponse); */ public abstract void getTableNames( com.google.protobuf.RpcController controller, @@ -60600,11 +64158,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc GetClusterStatus(.hbase.pb.GetClusterStatusRequest) returns (.hbase.pb.GetClusterStatusResponse); - * *
        ** Return cluster status. 
        * 
+ * + * rpc GetClusterStatus(.hbase.pb.GetClusterStatusRequest) returns (.hbase.pb.GetClusterStatusResponse); */ public abstract void getClusterStatus( com.google.protobuf.RpcController controller, @@ -60612,11 +64170,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc IsMasterRunning(.hbase.pb.IsMasterRunningRequest) returns (.hbase.pb.IsMasterRunningResponse); - * *
        ** return true if master is available 
        * 
+ * + * rpc IsMasterRunning(.hbase.pb.IsMasterRunningRequest) returns (.hbase.pb.IsMasterRunningResponse); */ public abstract void isMasterRunning( com.google.protobuf.RpcController controller, @@ -60624,11 +64182,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc AddColumn(.hbase.pb.AddColumnRequest) returns (.hbase.pb.AddColumnResponse); - * *
        ** Adds a column to the specified table. 
        * 
+ * + * rpc AddColumn(.hbase.pb.AddColumnRequest) returns (.hbase.pb.AddColumnResponse); */ public abstract void addColumn( com.google.protobuf.RpcController controller, @@ -60636,11 +64194,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc DeleteColumn(.hbase.pb.DeleteColumnRequest) returns (.hbase.pb.DeleteColumnResponse); - * *
        ** Deletes a column from the specified table. Table must be disabled. 
        * 
+ * + * rpc DeleteColumn(.hbase.pb.DeleteColumnRequest) returns (.hbase.pb.DeleteColumnResponse); */ public abstract void deleteColumn( com.google.protobuf.RpcController controller, @@ -60648,11 +64206,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc ModifyColumn(.hbase.pb.ModifyColumnRequest) returns (.hbase.pb.ModifyColumnResponse); - * *
        ** Modifies an existing column on the specified table. 
        * 
+ * + * rpc ModifyColumn(.hbase.pb.ModifyColumnRequest) returns (.hbase.pb.ModifyColumnResponse); */ public abstract void modifyColumn( com.google.protobuf.RpcController controller, @@ -60660,11 +64218,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc MoveRegion(.hbase.pb.MoveRegionRequest) returns (.hbase.pb.MoveRegionResponse); - * *
        ** Move the region region to the destination server. 
        * 
+ * + * rpc MoveRegion(.hbase.pb.MoveRegionRequest) returns (.hbase.pb.MoveRegionResponse); */ public abstract void moveRegion( com.google.protobuf.RpcController controller, @@ -60672,11 +64230,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc DispatchMergingRegions(.hbase.pb.DispatchMergingRegionsRequest) returns (.hbase.pb.DispatchMergingRegionsResponse); - * *
        ** Master dispatch merging the regions 
        * 
+ * + * rpc DispatchMergingRegions(.hbase.pb.DispatchMergingRegionsRequest) returns (.hbase.pb.DispatchMergingRegionsResponse); */ public abstract void dispatchMergingRegions( com.google.protobuf.RpcController controller, @@ -60684,11 +64242,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc AssignRegion(.hbase.pb.AssignRegionRequest) returns (.hbase.pb.AssignRegionResponse); - * *
        ** Assign a region to a server chosen at random. 
        * 
+ * + * rpc AssignRegion(.hbase.pb.AssignRegionRequest) returns (.hbase.pb.AssignRegionResponse); */ public abstract void assignRegion( com.google.protobuf.RpcController controller, @@ -60696,8 +64254,6 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc UnassignRegion(.hbase.pb.UnassignRegionRequest) returns (.hbase.pb.UnassignRegionResponse); - * *
        **
        * Unassign a region from current hosting regionserver.  Region will then be
@@ -60705,6 +64261,8 @@ public final class MasterProtos {
        * back to the same server.  Use MoveRegion if you want
        * to control the region movement.
        * 
+ * + * rpc UnassignRegion(.hbase.pb.UnassignRegionRequest) returns (.hbase.pb.UnassignRegionResponse); */ public abstract void unassignRegion( com.google.protobuf.RpcController controller, @@ -60712,8 +64270,6 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc OfflineRegion(.hbase.pb.OfflineRegionRequest) returns (.hbase.pb.OfflineRegionResponse); - * *
        **
        * Offline a region from the assignment manager's in-memory state.  The
@@ -60721,6 +64277,8 @@ public final class MasterProtos {
        * automatically reassign the region as in unassign.   This is a special
        * method, and should only be used by experts or hbck.
        * 
+ * + * rpc OfflineRegion(.hbase.pb.OfflineRegionRequest) returns (.hbase.pb.OfflineRegionResponse); */ public abstract void offlineRegion( com.google.protobuf.RpcController controller, @@ -60728,11 +64286,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc DeleteTable(.hbase.pb.DeleteTableRequest) returns (.hbase.pb.DeleteTableResponse); - * *
        ** Deletes a table 
        * 
+ * + * rpc DeleteTable(.hbase.pb.DeleteTableRequest) returns (.hbase.pb.DeleteTableResponse); */ public abstract void deleteTable( com.google.protobuf.RpcController controller, @@ -60740,11 +64298,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc truncateTable(.hbase.pb.TruncateTableRequest) returns (.hbase.pb.TruncateTableResponse); - * *
        ** Truncate a table 
        * 
+ * + * rpc truncateTable(.hbase.pb.TruncateTableRequest) returns (.hbase.pb.TruncateTableResponse); */ public abstract void truncateTable( com.google.protobuf.RpcController controller, @@ -60752,11 +64310,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc EnableTable(.hbase.pb.EnableTableRequest) returns (.hbase.pb.EnableTableResponse); - * *
        ** Puts the table on-line (only needed if table has been previously taken offline) 
        * 
+ * + * rpc EnableTable(.hbase.pb.EnableTableRequest) returns (.hbase.pb.EnableTableResponse); */ public abstract void enableTable( com.google.protobuf.RpcController controller, @@ -60764,11 +64322,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc DisableTable(.hbase.pb.DisableTableRequest) returns (.hbase.pb.DisableTableResponse); - * *
        ** Take table offline 
        * 
+ * + * rpc DisableTable(.hbase.pb.DisableTableRequest) returns (.hbase.pb.DisableTableResponse); */ public abstract void disableTable( com.google.protobuf.RpcController controller, @@ -60776,11 +64334,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc ModifyTable(.hbase.pb.ModifyTableRequest) returns (.hbase.pb.ModifyTableResponse); - * *
        ** Modify a table's metadata 
        * 
+ * + * rpc ModifyTable(.hbase.pb.ModifyTableRequest) returns (.hbase.pb.ModifyTableResponse); */ public abstract void modifyTable( com.google.protobuf.RpcController controller, @@ -60788,11 +64346,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc CreateTable(.hbase.pb.CreateTableRequest) returns (.hbase.pb.CreateTableResponse); - * *
        ** Creates a new table asynchronously 
        * 
+ * + * rpc CreateTable(.hbase.pb.CreateTableRequest) returns (.hbase.pb.CreateTableResponse); */ public abstract void createTable( com.google.protobuf.RpcController controller, @@ -60800,11 +64358,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc Shutdown(.hbase.pb.ShutdownRequest) returns (.hbase.pb.ShutdownResponse); - * *
        ** Shutdown an HBase cluster. 
        * 
+ * + * rpc Shutdown(.hbase.pb.ShutdownRequest) returns (.hbase.pb.ShutdownResponse); */ public abstract void shutdown( com.google.protobuf.RpcController controller, @@ -60812,11 +64370,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc StopMaster(.hbase.pb.StopMasterRequest) returns (.hbase.pb.StopMasterResponse); - * *
        ** Stop HBase Master only.  Does not shutdown the cluster. 
        * 
+ * + * rpc StopMaster(.hbase.pb.StopMasterRequest) returns (.hbase.pb.StopMasterResponse); */ public abstract void stopMaster( com.google.protobuf.RpcController controller, @@ -60824,12 +64382,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc IsMasterInMaintenanceMode(.hbase.pb.IsInMaintenanceModeRequest) returns (.hbase.pb.IsInMaintenanceModeResponse); - * *
        **
        * Query whether the Master is in maintenance mode.
        * 
+ * + * rpc IsMasterInMaintenanceMode(.hbase.pb.IsInMaintenanceModeRequest) returns (.hbase.pb.IsInMaintenanceModeResponse); */ public abstract void isMasterInMaintenanceMode( com.google.protobuf.RpcController controller, @@ -60837,14 +64395,14 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc Balance(.hbase.pb.BalanceRequest) returns (.hbase.pb.BalanceResponse); - * *
        **
        * Run the balancer.  Will run the balancer and if regions to move, it will
        * go ahead and do the reassignments.  Can NOT run for various reasons.
        * Check logs.
        * 
+ * + * rpc Balance(.hbase.pb.BalanceRequest) returns (.hbase.pb.BalanceResponse); */ public abstract void balance( com.google.protobuf.RpcController controller, @@ -60852,13 +64410,13 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc SetBalancerRunning(.hbase.pb.SetBalancerRunningRequest) returns (.hbase.pb.SetBalancerRunningResponse); - * *
        **
        * Turn the load balancer on or off.
        * If synchronous is true, it waits until current balance() call, if outstanding, to return.
        * 
+ * + * rpc SetBalancerRunning(.hbase.pb.SetBalancerRunningRequest) returns (.hbase.pb.SetBalancerRunningResponse); */ public abstract void setBalancerRunning( com.google.protobuf.RpcController controller, @@ -60866,12 +64424,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc IsBalancerEnabled(.hbase.pb.IsBalancerEnabledRequest) returns (.hbase.pb.IsBalancerEnabledResponse); - * *
        **
        * Query whether the Region Balancer is running.
        * 
+ * + * rpc IsBalancerEnabled(.hbase.pb.IsBalancerEnabledRequest) returns (.hbase.pb.IsBalancerEnabledResponse); */ public abstract void isBalancerEnabled( com.google.protobuf.RpcController controller, @@ -60879,13 +64437,13 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc SetSplitOrMergeEnabled(.hbase.pb.SetSplitOrMergeEnabledRequest) returns (.hbase.pb.SetSplitOrMergeEnabledResponse); - * *
        **
        * Turn the split or merge switch on or off.
        * If synchronous is true, it waits until current operation call, if outstanding, to return.
        * 
+ * + * rpc SetSplitOrMergeEnabled(.hbase.pb.SetSplitOrMergeEnabledRequest) returns (.hbase.pb.SetSplitOrMergeEnabledResponse); */ public abstract void setSplitOrMergeEnabled( com.google.protobuf.RpcController controller, @@ -60893,12 +64451,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc IsSplitOrMergeEnabled(.hbase.pb.IsSplitOrMergeEnabledRequest) returns (.hbase.pb.IsSplitOrMergeEnabledResponse); - * *
        **
        * Query whether the split or merge switch is on/off.
        * 
+ * + * rpc IsSplitOrMergeEnabled(.hbase.pb.IsSplitOrMergeEnabledRequest) returns (.hbase.pb.IsSplitOrMergeEnabledResponse); */ public abstract void isSplitOrMergeEnabled( com.google.protobuf.RpcController controller, @@ -60906,12 +64464,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc Normalize(.hbase.pb.NormalizeRequest) returns (.hbase.pb.NormalizeResponse); - * *
        **
        * Run region normalizer. Can NOT run for various reasons. Check logs.
        * 
+ * + * rpc Normalize(.hbase.pb.NormalizeRequest) returns (.hbase.pb.NormalizeResponse); */ public abstract void normalize( com.google.protobuf.RpcController controller, @@ -60919,12 +64477,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc SetNormalizerRunning(.hbase.pb.SetNormalizerRunningRequest) returns (.hbase.pb.SetNormalizerRunningResponse); - * *
        **
        * Turn region normalizer on or off.
        * 
+ * + * rpc SetNormalizerRunning(.hbase.pb.SetNormalizerRunningRequest) returns (.hbase.pb.SetNormalizerRunningResponse); */ public abstract void setNormalizerRunning( com.google.protobuf.RpcController controller, @@ -60932,12 +64490,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc IsNormalizerEnabled(.hbase.pb.IsNormalizerEnabledRequest) returns (.hbase.pb.IsNormalizerEnabledResponse); - * *
        **
        * Query whether region normalizer is enabled.
        * 
+ * + * rpc IsNormalizerEnabled(.hbase.pb.IsNormalizerEnabledRequest) returns (.hbase.pb.IsNormalizerEnabledResponse); */ public abstract void isNormalizerEnabled( com.google.protobuf.RpcController controller, @@ -60945,11 +64503,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc RunCatalogScan(.hbase.pb.RunCatalogScanRequest) returns (.hbase.pb.RunCatalogScanResponse); - * *
        ** Get a run of the catalog janitor 
        * 
+ * + * rpc RunCatalogScan(.hbase.pb.RunCatalogScanRequest) returns (.hbase.pb.RunCatalogScanResponse); */ public abstract void runCatalogScan( com.google.protobuf.RpcController controller, @@ -60957,12 +64515,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc EnableCatalogJanitor(.hbase.pb.EnableCatalogJanitorRequest) returns (.hbase.pb.EnableCatalogJanitorResponse); - * *
        **
        * Enable the catalog janitor on or off.
        * 
+ * + * rpc EnableCatalogJanitor(.hbase.pb.EnableCatalogJanitorRequest) returns (.hbase.pb.EnableCatalogJanitorResponse); */ public abstract void enableCatalogJanitor( com.google.protobuf.RpcController controller, @@ -60970,12 +64528,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc IsCatalogJanitorEnabled(.hbase.pb.IsCatalogJanitorEnabledRequest) returns (.hbase.pb.IsCatalogJanitorEnabledResponse); - * *
        **
        * Query whether the catalog janitor is enabled.
        * 
+ * + * rpc IsCatalogJanitorEnabled(.hbase.pb.IsCatalogJanitorEnabledRequest) returns (.hbase.pb.IsCatalogJanitorEnabledResponse); */ public abstract void isCatalogJanitorEnabled( com.google.protobuf.RpcController controller, @@ -60983,12 +64541,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc ExecMasterService(.hbase.pb.CoprocessorServiceRequest) returns (.hbase.pb.CoprocessorServiceResponse); - * *
        **
        * Call a master coprocessor endpoint
        * 
+ * + * rpc ExecMasterService(.hbase.pb.CoprocessorServiceRequest) returns (.hbase.pb.CoprocessorServiceResponse); */ public abstract void execMasterService( com.google.protobuf.RpcController controller, @@ -60996,12 +64554,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc Snapshot(.hbase.pb.SnapshotRequest) returns (.hbase.pb.SnapshotResponse); - * *
        **
        * Create a snapshot for the given table.
        * 
+ * + * rpc Snapshot(.hbase.pb.SnapshotRequest) returns (.hbase.pb.SnapshotResponse); */ public abstract void snapshot( com.google.protobuf.RpcController controller, @@ -61009,13 +64567,13 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc GetCompletedSnapshots(.hbase.pb.GetCompletedSnapshotsRequest) returns (.hbase.pb.GetCompletedSnapshotsResponse); - * *
        **
        * Get completed snapshots.
        * Returns a list of snapshot descriptors for completed snapshots
        * 
+ * + * rpc GetCompletedSnapshots(.hbase.pb.GetCompletedSnapshotsRequest) returns (.hbase.pb.GetCompletedSnapshotsResponse); */ public abstract void getCompletedSnapshots( com.google.protobuf.RpcController controller, @@ -61023,12 +64581,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc DeleteSnapshot(.hbase.pb.DeleteSnapshotRequest) returns (.hbase.pb.DeleteSnapshotResponse); - * *
        **
        * Delete an existing snapshot. This method can also be used to clean up an aborted snapshot.
        * 
+ * + * rpc DeleteSnapshot(.hbase.pb.DeleteSnapshotRequest) returns (.hbase.pb.DeleteSnapshotResponse); */ public abstract void deleteSnapshot( com.google.protobuf.RpcController controller, @@ -61036,12 +64594,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc IsSnapshotDone(.hbase.pb.IsSnapshotDoneRequest) returns (.hbase.pb.IsSnapshotDoneResponse); - * *
        **
        * Determine if the snapshot is done yet.
        * 
+ * + * rpc IsSnapshotDone(.hbase.pb.IsSnapshotDoneRequest) returns (.hbase.pb.IsSnapshotDoneResponse); */ public abstract void isSnapshotDone( com.google.protobuf.RpcController controller, @@ -61049,12 +64607,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc RestoreSnapshot(.hbase.pb.RestoreSnapshotRequest) returns (.hbase.pb.RestoreSnapshotResponse); - * *
        **
        * Restore a snapshot
        * 
+ * + * rpc RestoreSnapshot(.hbase.pb.RestoreSnapshotRequest) returns (.hbase.pb.RestoreSnapshotResponse); */ public abstract void restoreSnapshot( com.google.protobuf.RpcController controller, @@ -61062,12 +64620,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc ExecProcedure(.hbase.pb.ExecProcedureRequest) returns (.hbase.pb.ExecProcedureResponse); - * *
        **
        * Execute a distributed procedure.
        * 
+ * + * rpc ExecProcedure(.hbase.pb.ExecProcedureRequest) returns (.hbase.pb.ExecProcedureResponse); */ public abstract void execProcedure( com.google.protobuf.RpcController controller, @@ -61075,12 +64633,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc ExecProcedureWithRet(.hbase.pb.ExecProcedureRequest) returns (.hbase.pb.ExecProcedureResponse); - * *
        **
        * Execute a distributed procedure with return data.
        * 
+ * + * rpc ExecProcedureWithRet(.hbase.pb.ExecProcedureRequest) returns (.hbase.pb.ExecProcedureResponse); */ public abstract void execProcedureWithRet( com.google.protobuf.RpcController controller, @@ -61088,12 +64646,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc IsProcedureDone(.hbase.pb.IsProcedureDoneRequest) returns (.hbase.pb.IsProcedureDoneResponse); - * *
        **
        * Determine if the procedure is done yet.
        * 
+ * + * rpc IsProcedureDone(.hbase.pb.IsProcedureDoneRequest) returns (.hbase.pb.IsProcedureDoneResponse); */ public abstract void isProcedureDone( com.google.protobuf.RpcController controller, @@ -61101,11 +64659,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc ModifyNamespace(.hbase.pb.ModifyNamespaceRequest) returns (.hbase.pb.ModifyNamespaceResponse); - * *
        ** Modify a namespace's metadata 
        * 
+ * + * rpc ModifyNamespace(.hbase.pb.ModifyNamespaceRequest) returns (.hbase.pb.ModifyNamespaceResponse); */ public abstract void modifyNamespace( com.google.protobuf.RpcController controller, @@ -61113,11 +64671,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc CreateNamespace(.hbase.pb.CreateNamespaceRequest) returns (.hbase.pb.CreateNamespaceResponse); - * *
        ** Creates a new namespace synchronously 
        * 
+ * + * rpc CreateNamespace(.hbase.pb.CreateNamespaceRequest) returns (.hbase.pb.CreateNamespaceResponse); */ public abstract void createNamespace( com.google.protobuf.RpcController controller, @@ -61125,11 +64683,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc DeleteNamespace(.hbase.pb.DeleteNamespaceRequest) returns (.hbase.pb.DeleteNamespaceResponse); - * *
        ** Deletes namespace synchronously 
        * 
+ * + * rpc DeleteNamespace(.hbase.pb.DeleteNamespaceRequest) returns (.hbase.pb.DeleteNamespaceResponse); */ public abstract void deleteNamespace( com.google.protobuf.RpcController controller, @@ -61137,11 +64695,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc GetNamespaceDescriptor(.hbase.pb.GetNamespaceDescriptorRequest) returns (.hbase.pb.GetNamespaceDescriptorResponse); - * *
        ** Get a namespace descriptor by name 
        * 
+ * + * rpc GetNamespaceDescriptor(.hbase.pb.GetNamespaceDescriptorRequest) returns (.hbase.pb.GetNamespaceDescriptorResponse); */ public abstract void getNamespaceDescriptor( com.google.protobuf.RpcController controller, @@ -61149,11 +64707,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc ListNamespaceDescriptors(.hbase.pb.ListNamespaceDescriptorsRequest) returns (.hbase.pb.ListNamespaceDescriptorsResponse); - * *
        ** returns a list of namespaces 
        * 
+ * + * rpc ListNamespaceDescriptors(.hbase.pb.ListNamespaceDescriptorsRequest) returns (.hbase.pb.ListNamespaceDescriptorsResponse); */ public abstract void listNamespaceDescriptors( com.google.protobuf.RpcController controller, @@ -61161,11 +64719,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc ListTableDescriptorsByNamespace(.hbase.pb.ListTableDescriptorsByNamespaceRequest) returns (.hbase.pb.ListTableDescriptorsByNamespaceResponse); - * *
        ** returns a list of tables for a given namespace
        * 
+ * + * rpc ListTableDescriptorsByNamespace(.hbase.pb.ListTableDescriptorsByNamespaceRequest) returns (.hbase.pb.ListTableDescriptorsByNamespaceResponse); */ public abstract void listTableDescriptorsByNamespace( com.google.protobuf.RpcController controller, @@ -61173,11 +64731,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc ListTableNamesByNamespace(.hbase.pb.ListTableNamesByNamespaceRequest) returns (.hbase.pb.ListTableNamesByNamespaceResponse); - * *
        ** returns a list of tables for a given namespace
        * 
+ * + * rpc ListTableNamesByNamespace(.hbase.pb.ListTableNamesByNamespaceRequest) returns (.hbase.pb.ListTableNamesByNamespaceResponse); */ public abstract void listTableNamesByNamespace( com.google.protobuf.RpcController controller, @@ -61185,11 +64743,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc GetTableState(.hbase.pb.GetTableStateRequest) returns (.hbase.pb.GetTableStateResponse); - * *
        ** returns table state 
        * 
+ * + * rpc GetTableState(.hbase.pb.GetTableStateRequest) returns (.hbase.pb.GetTableStateResponse); */ public abstract void getTableState( com.google.protobuf.RpcController controller, @@ -61197,11 +64755,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc SetQuota(.hbase.pb.SetQuotaRequest) returns (.hbase.pb.SetQuotaResponse); - * *
        ** Apply the new quota settings 
        * 
+ * + * rpc SetQuota(.hbase.pb.SetQuotaRequest) returns (.hbase.pb.SetQuotaResponse); */ public abstract void setQuota( com.google.protobuf.RpcController controller, @@ -61209,11 +64767,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc getLastMajorCompactionTimestamp(.hbase.pb.MajorCompactionTimestampRequest) returns (.hbase.pb.MajorCompactionTimestampResponse); - * *
        ** Returns the timestamp of the last major compaction 
        * 
+ * + * rpc getLastMajorCompactionTimestamp(.hbase.pb.MajorCompactionTimestampRequest) returns (.hbase.pb.MajorCompactionTimestampResponse); */ public abstract void getLastMajorCompactionTimestamp( com.google.protobuf.RpcController controller, @@ -61221,11 +64779,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc getLastMajorCompactionTimestampForRegion(.hbase.pb.MajorCompactionTimestampForRegionRequest) returns (.hbase.pb.MajorCompactionTimestampResponse); - * *
        ** Returns the timestamp of the last major compaction 
        * 
+ * + * rpc getLastMajorCompactionTimestampForRegion(.hbase.pb.MajorCompactionTimestampForRegionRequest) returns (.hbase.pb.MajorCompactionTimestampResponse); */ public abstract void getLastMajorCompactionTimestampForRegion( com.google.protobuf.RpcController controller, @@ -61241,11 +64799,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc getSecurityCapabilities(.hbase.pb.SecurityCapabilitiesRequest) returns (.hbase.pb.SecurityCapabilitiesResponse); - * *
        ** Returns the security capabilities in effect on the cluster 
        * 
+ * + * rpc getSecurityCapabilities(.hbase.pb.SecurityCapabilitiesRequest) returns (.hbase.pb.SecurityCapabilitiesResponse); */ public abstract void getSecurityCapabilities( com.google.protobuf.RpcController controller, @@ -61253,11 +64811,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc AbortProcedure(.hbase.pb.AbortProcedureRequest) returns (.hbase.pb.AbortProcedureResponse); - * *
        ** Abort a procedure 
        * 
+ * + * rpc AbortProcedure(.hbase.pb.AbortProcedureRequest) returns (.hbase.pb.AbortProcedureResponse); */ public abstract void abortProcedure( com.google.protobuf.RpcController controller, @@ -61265,11 +64823,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc ListProcedures(.hbase.pb.ListProceduresRequest) returns (.hbase.pb.ListProceduresResponse); - * *
        ** returns a list of procedures 
        * 
+ * + * rpc ListProcedures(.hbase.pb.ListProceduresRequest) returns (.hbase.pb.ListProceduresResponse); */ public abstract void listProcedures( com.google.protobuf.RpcController controller, @@ -62138,11 +65696,11 @@ public final class MasterProtos { } /** - * rpc GetSchemaAlterStatus(.hbase.pb.GetSchemaAlterStatusRequest) returns (.hbase.pb.GetSchemaAlterStatusResponse); - * *
      ** Used by the client to get the number of regions that have received the updated schema 
      * 
+ * + * rpc GetSchemaAlterStatus(.hbase.pb.GetSchemaAlterStatusRequest) returns (.hbase.pb.GetSchemaAlterStatusResponse); */ public abstract void getSchemaAlterStatus( com.google.protobuf.RpcController controller, @@ -62150,11 +65708,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc GetTableDescriptors(.hbase.pb.GetTableDescriptorsRequest) returns (.hbase.pb.GetTableDescriptorsResponse); - * *
      ** Get list of TableDescriptors for requested tables. 
      * 
+ * + * rpc GetTableDescriptors(.hbase.pb.GetTableDescriptorsRequest) returns (.hbase.pb.GetTableDescriptorsResponse); */ public abstract void getTableDescriptors( com.google.protobuf.RpcController controller, @@ -62162,11 +65720,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc GetTableNames(.hbase.pb.GetTableNamesRequest) returns (.hbase.pb.GetTableNamesResponse); - * *
      ** Get the list of table names. 
      * 
+ * + * rpc GetTableNames(.hbase.pb.GetTableNamesRequest) returns (.hbase.pb.GetTableNamesResponse); */ public abstract void getTableNames( com.google.protobuf.RpcController controller, @@ -62174,11 +65732,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc GetClusterStatus(.hbase.pb.GetClusterStatusRequest) returns (.hbase.pb.GetClusterStatusResponse); - * *
      ** Return cluster status. 
      * 
+ * + * rpc GetClusterStatus(.hbase.pb.GetClusterStatusRequest) returns (.hbase.pb.GetClusterStatusResponse); */ public abstract void getClusterStatus( com.google.protobuf.RpcController controller, @@ -62186,11 +65744,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc IsMasterRunning(.hbase.pb.IsMasterRunningRequest) returns (.hbase.pb.IsMasterRunningResponse); - * *
      ** return true if master is available 
      * 
+ * + * rpc IsMasterRunning(.hbase.pb.IsMasterRunningRequest) returns (.hbase.pb.IsMasterRunningResponse); */ public abstract void isMasterRunning( com.google.protobuf.RpcController controller, @@ -62198,11 +65756,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc AddColumn(.hbase.pb.AddColumnRequest) returns (.hbase.pb.AddColumnResponse); - * *
      ** Adds a column to the specified table. 
      * 
+ * + * rpc AddColumn(.hbase.pb.AddColumnRequest) returns (.hbase.pb.AddColumnResponse); */ public abstract void addColumn( com.google.protobuf.RpcController controller, @@ -62210,11 +65768,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc DeleteColumn(.hbase.pb.DeleteColumnRequest) returns (.hbase.pb.DeleteColumnResponse); - * *
      ** Deletes a column from the specified table. Table must be disabled. 
      * 
+ * + * rpc DeleteColumn(.hbase.pb.DeleteColumnRequest) returns (.hbase.pb.DeleteColumnResponse); */ public abstract void deleteColumn( com.google.protobuf.RpcController controller, @@ -62222,11 +65780,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc ModifyColumn(.hbase.pb.ModifyColumnRequest) returns (.hbase.pb.ModifyColumnResponse); - * *
      ** Modifies an existing column on the specified table. 
      * 
+ * + * rpc ModifyColumn(.hbase.pb.ModifyColumnRequest) returns (.hbase.pb.ModifyColumnResponse); */ public abstract void modifyColumn( com.google.protobuf.RpcController controller, @@ -62234,11 +65792,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc MoveRegion(.hbase.pb.MoveRegionRequest) returns (.hbase.pb.MoveRegionResponse); - * *
      ** Move the region region to the destination server. 
      * 
+ * + * rpc MoveRegion(.hbase.pb.MoveRegionRequest) returns (.hbase.pb.MoveRegionResponse); */ public abstract void moveRegion( com.google.protobuf.RpcController controller, @@ -62246,11 +65804,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc DispatchMergingRegions(.hbase.pb.DispatchMergingRegionsRequest) returns (.hbase.pb.DispatchMergingRegionsResponse); - * *
      ** Master dispatch merging the regions 
      * 
+ * + * rpc DispatchMergingRegions(.hbase.pb.DispatchMergingRegionsRequest) returns (.hbase.pb.DispatchMergingRegionsResponse); */ public abstract void dispatchMergingRegions( com.google.protobuf.RpcController controller, @@ -62258,11 +65816,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc AssignRegion(.hbase.pb.AssignRegionRequest) returns (.hbase.pb.AssignRegionResponse); - * *
      ** Assign a region to a server chosen at random. 
      * 
+ * + * rpc AssignRegion(.hbase.pb.AssignRegionRequest) returns (.hbase.pb.AssignRegionResponse); */ public abstract void assignRegion( com.google.protobuf.RpcController controller, @@ -62270,8 +65828,6 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc UnassignRegion(.hbase.pb.UnassignRegionRequest) returns (.hbase.pb.UnassignRegionResponse); - * *
      **
      * Unassign a region from current hosting regionserver.  Region will then be
@@ -62279,6 +65835,8 @@ public final class MasterProtos {
      * back to the same server.  Use MoveRegion if you want
      * to control the region movement.
      * 
+ * + * rpc UnassignRegion(.hbase.pb.UnassignRegionRequest) returns (.hbase.pb.UnassignRegionResponse); */ public abstract void unassignRegion( com.google.protobuf.RpcController controller, @@ -62286,8 +65844,6 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc OfflineRegion(.hbase.pb.OfflineRegionRequest) returns (.hbase.pb.OfflineRegionResponse); - * *
      **
      * Offline a region from the assignment manager's in-memory state.  The
@@ -62295,6 +65851,8 @@ public final class MasterProtos {
      * automatically reassign the region as in unassign.   This is a special
      * method, and should only be used by experts or hbck.
      * 
+ * + * rpc OfflineRegion(.hbase.pb.OfflineRegionRequest) returns (.hbase.pb.OfflineRegionResponse); */ public abstract void offlineRegion( com.google.protobuf.RpcController controller, @@ -62302,11 +65860,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc DeleteTable(.hbase.pb.DeleteTableRequest) returns (.hbase.pb.DeleteTableResponse); - * *
      ** Deletes a table 
      * 
+ * + * rpc DeleteTable(.hbase.pb.DeleteTableRequest) returns (.hbase.pb.DeleteTableResponse); */ public abstract void deleteTable( com.google.protobuf.RpcController controller, @@ -62314,11 +65872,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc truncateTable(.hbase.pb.TruncateTableRequest) returns (.hbase.pb.TruncateTableResponse); - * *
      ** Truncate a table 
      * 
+ * + * rpc truncateTable(.hbase.pb.TruncateTableRequest) returns (.hbase.pb.TruncateTableResponse); */ public abstract void truncateTable( com.google.protobuf.RpcController controller, @@ -62326,11 +65884,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc EnableTable(.hbase.pb.EnableTableRequest) returns (.hbase.pb.EnableTableResponse); - * *
      ** Puts the table on-line (only needed if table has been previously taken offline) 
      * 
+ * + * rpc EnableTable(.hbase.pb.EnableTableRequest) returns (.hbase.pb.EnableTableResponse); */ public abstract void enableTable( com.google.protobuf.RpcController controller, @@ -62338,11 +65896,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc DisableTable(.hbase.pb.DisableTableRequest) returns (.hbase.pb.DisableTableResponse); - * *
      ** Take table offline 
      * 
+ * + * rpc DisableTable(.hbase.pb.DisableTableRequest) returns (.hbase.pb.DisableTableResponse); */ public abstract void disableTable( com.google.protobuf.RpcController controller, @@ -62350,11 +65908,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc ModifyTable(.hbase.pb.ModifyTableRequest) returns (.hbase.pb.ModifyTableResponse); - * *
      ** Modify a table's metadata 
      * 
+ * + * rpc ModifyTable(.hbase.pb.ModifyTableRequest) returns (.hbase.pb.ModifyTableResponse); */ public abstract void modifyTable( com.google.protobuf.RpcController controller, @@ -62362,11 +65920,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc CreateTable(.hbase.pb.CreateTableRequest) returns (.hbase.pb.CreateTableResponse); - * *
      ** Creates a new table asynchronously 
      * 
+ * + * rpc CreateTable(.hbase.pb.CreateTableRequest) returns (.hbase.pb.CreateTableResponse); */ public abstract void createTable( com.google.protobuf.RpcController controller, @@ -62374,11 +65932,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc Shutdown(.hbase.pb.ShutdownRequest) returns (.hbase.pb.ShutdownResponse); - * *
      ** Shutdown an HBase cluster. 
      * 
+ * + * rpc Shutdown(.hbase.pb.ShutdownRequest) returns (.hbase.pb.ShutdownResponse); */ public abstract void shutdown( com.google.protobuf.RpcController controller, @@ -62386,11 +65944,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc StopMaster(.hbase.pb.StopMasterRequest) returns (.hbase.pb.StopMasterResponse); - * *
      ** Stop HBase Master only.  Does not shutdown the cluster. 
      * 
+ * + * rpc StopMaster(.hbase.pb.StopMasterRequest) returns (.hbase.pb.StopMasterResponse); */ public abstract void stopMaster( com.google.protobuf.RpcController controller, @@ -62398,12 +65956,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc IsMasterInMaintenanceMode(.hbase.pb.IsInMaintenanceModeRequest) returns (.hbase.pb.IsInMaintenanceModeResponse); - * *
      **
      * Query whether the Master is in maintenance mode.
      * 
+ * + * rpc IsMasterInMaintenanceMode(.hbase.pb.IsInMaintenanceModeRequest) returns (.hbase.pb.IsInMaintenanceModeResponse); */ public abstract void isMasterInMaintenanceMode( com.google.protobuf.RpcController controller, @@ -62411,14 +65969,14 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc Balance(.hbase.pb.BalanceRequest) returns (.hbase.pb.BalanceResponse); - * *
      **
      * Run the balancer.  Will run the balancer and if regions to move, it will
      * go ahead and do the reassignments.  Can NOT run for various reasons.
      * Check logs.
      * 
+ * + * rpc Balance(.hbase.pb.BalanceRequest) returns (.hbase.pb.BalanceResponse); */ public abstract void balance( com.google.protobuf.RpcController controller, @@ -62426,13 +65984,13 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc SetBalancerRunning(.hbase.pb.SetBalancerRunningRequest) returns (.hbase.pb.SetBalancerRunningResponse); - * *
      **
      * Turn the load balancer on or off.
      * If synchronous is true, it waits until current balance() call, if outstanding, to return.
      * 
+ * + * rpc SetBalancerRunning(.hbase.pb.SetBalancerRunningRequest) returns (.hbase.pb.SetBalancerRunningResponse); */ public abstract void setBalancerRunning( com.google.protobuf.RpcController controller, @@ -62440,12 +65998,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc IsBalancerEnabled(.hbase.pb.IsBalancerEnabledRequest) returns (.hbase.pb.IsBalancerEnabledResponse); - * *
      **
      * Query whether the Region Balancer is running.
      * 
+ * + * rpc IsBalancerEnabled(.hbase.pb.IsBalancerEnabledRequest) returns (.hbase.pb.IsBalancerEnabledResponse); */ public abstract void isBalancerEnabled( com.google.protobuf.RpcController controller, @@ -62453,13 +66011,13 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc SetSplitOrMergeEnabled(.hbase.pb.SetSplitOrMergeEnabledRequest) returns (.hbase.pb.SetSplitOrMergeEnabledResponse); - * *
      **
      * Turn the split or merge switch on or off.
      * If synchronous is true, it waits until current operation call, if outstanding, to return.
      * 
+ * + * rpc SetSplitOrMergeEnabled(.hbase.pb.SetSplitOrMergeEnabledRequest) returns (.hbase.pb.SetSplitOrMergeEnabledResponse); */ public abstract void setSplitOrMergeEnabled( com.google.protobuf.RpcController controller, @@ -62467,12 +66025,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc IsSplitOrMergeEnabled(.hbase.pb.IsSplitOrMergeEnabledRequest) returns (.hbase.pb.IsSplitOrMergeEnabledResponse); - * *
      **
      * Query whether the split or merge switch is on/off.
      * 
+ * + * rpc IsSplitOrMergeEnabled(.hbase.pb.IsSplitOrMergeEnabledRequest) returns (.hbase.pb.IsSplitOrMergeEnabledResponse); */ public abstract void isSplitOrMergeEnabled( com.google.protobuf.RpcController controller, @@ -62480,12 +66038,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc Normalize(.hbase.pb.NormalizeRequest) returns (.hbase.pb.NormalizeResponse); - * *
      **
      * Run region normalizer. Can NOT run for various reasons. Check logs.
      * 
+ * + * rpc Normalize(.hbase.pb.NormalizeRequest) returns (.hbase.pb.NormalizeResponse); */ public abstract void normalize( com.google.protobuf.RpcController controller, @@ -62493,12 +66051,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc SetNormalizerRunning(.hbase.pb.SetNormalizerRunningRequest) returns (.hbase.pb.SetNormalizerRunningResponse); - * *
      **
      * Turn region normalizer on or off.
      * 
+ * + * rpc SetNormalizerRunning(.hbase.pb.SetNormalizerRunningRequest) returns (.hbase.pb.SetNormalizerRunningResponse); */ public abstract void setNormalizerRunning( com.google.protobuf.RpcController controller, @@ -62506,12 +66064,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc IsNormalizerEnabled(.hbase.pb.IsNormalizerEnabledRequest) returns (.hbase.pb.IsNormalizerEnabledResponse); - * *
      **
      * Query whether region normalizer is enabled.
      * 
+ * + * rpc IsNormalizerEnabled(.hbase.pb.IsNormalizerEnabledRequest) returns (.hbase.pb.IsNormalizerEnabledResponse); */ public abstract void isNormalizerEnabled( com.google.protobuf.RpcController controller, @@ -62519,11 +66077,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc RunCatalogScan(.hbase.pb.RunCatalogScanRequest) returns (.hbase.pb.RunCatalogScanResponse); - * *
      ** Get a run of the catalog janitor 
      * 
+ * + * rpc RunCatalogScan(.hbase.pb.RunCatalogScanRequest) returns (.hbase.pb.RunCatalogScanResponse); */ public abstract void runCatalogScan( com.google.protobuf.RpcController controller, @@ -62531,12 +66089,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc EnableCatalogJanitor(.hbase.pb.EnableCatalogJanitorRequest) returns (.hbase.pb.EnableCatalogJanitorResponse); - * *
      **
      * Enable the catalog janitor on or off.
      * 
+ * + * rpc EnableCatalogJanitor(.hbase.pb.EnableCatalogJanitorRequest) returns (.hbase.pb.EnableCatalogJanitorResponse); */ public abstract void enableCatalogJanitor( com.google.protobuf.RpcController controller, @@ -62544,12 +66102,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc IsCatalogJanitorEnabled(.hbase.pb.IsCatalogJanitorEnabledRequest) returns (.hbase.pb.IsCatalogJanitorEnabledResponse); - * *
      **
      * Query whether the catalog janitor is enabled.
      * 
+ * + * rpc IsCatalogJanitorEnabled(.hbase.pb.IsCatalogJanitorEnabledRequest) returns (.hbase.pb.IsCatalogJanitorEnabledResponse); */ public abstract void isCatalogJanitorEnabled( com.google.protobuf.RpcController controller, @@ -62557,12 +66115,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc ExecMasterService(.hbase.pb.CoprocessorServiceRequest) returns (.hbase.pb.CoprocessorServiceResponse); - * *
      **
      * Call a master coprocessor endpoint
      * 
+ * + * rpc ExecMasterService(.hbase.pb.CoprocessorServiceRequest) returns (.hbase.pb.CoprocessorServiceResponse); */ public abstract void execMasterService( com.google.protobuf.RpcController controller, @@ -62570,12 +66128,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc Snapshot(.hbase.pb.SnapshotRequest) returns (.hbase.pb.SnapshotResponse); - * *
      **
      * Create a snapshot for the given table.
      * 
+ * + * rpc Snapshot(.hbase.pb.SnapshotRequest) returns (.hbase.pb.SnapshotResponse); */ public abstract void snapshot( com.google.protobuf.RpcController controller, @@ -62583,13 +66141,13 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc GetCompletedSnapshots(.hbase.pb.GetCompletedSnapshotsRequest) returns (.hbase.pb.GetCompletedSnapshotsResponse); - * *
      **
      * Get completed snapshots.
      * Returns a list of snapshot descriptors for completed snapshots
      * 
+ * + * rpc GetCompletedSnapshots(.hbase.pb.GetCompletedSnapshotsRequest) returns (.hbase.pb.GetCompletedSnapshotsResponse); */ public abstract void getCompletedSnapshots( com.google.protobuf.RpcController controller, @@ -62597,12 +66155,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc DeleteSnapshot(.hbase.pb.DeleteSnapshotRequest) returns (.hbase.pb.DeleteSnapshotResponse); - * *
      **
      * Delete an existing snapshot. This method can also be used to clean up an aborted snapshot.
      * 
+ * + * rpc DeleteSnapshot(.hbase.pb.DeleteSnapshotRequest) returns (.hbase.pb.DeleteSnapshotResponse); */ public abstract void deleteSnapshot( com.google.protobuf.RpcController controller, @@ -62610,12 +66168,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc IsSnapshotDone(.hbase.pb.IsSnapshotDoneRequest) returns (.hbase.pb.IsSnapshotDoneResponse); - * *
      **
      * Determine if the snapshot is done yet.
      * 
+ * + * rpc IsSnapshotDone(.hbase.pb.IsSnapshotDoneRequest) returns (.hbase.pb.IsSnapshotDoneResponse); */ public abstract void isSnapshotDone( com.google.protobuf.RpcController controller, @@ -62623,12 +66181,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc RestoreSnapshot(.hbase.pb.RestoreSnapshotRequest) returns (.hbase.pb.RestoreSnapshotResponse); - * *
      **
      * Restore a snapshot
      * 
+ * + * rpc RestoreSnapshot(.hbase.pb.RestoreSnapshotRequest) returns (.hbase.pb.RestoreSnapshotResponse); */ public abstract void restoreSnapshot( com.google.protobuf.RpcController controller, @@ -62636,12 +66194,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc ExecProcedure(.hbase.pb.ExecProcedureRequest) returns (.hbase.pb.ExecProcedureResponse); - * *
      **
      * Execute a distributed procedure.
      * 
+ * + * rpc ExecProcedure(.hbase.pb.ExecProcedureRequest) returns (.hbase.pb.ExecProcedureResponse); */ public abstract void execProcedure( com.google.protobuf.RpcController controller, @@ -62649,12 +66207,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc ExecProcedureWithRet(.hbase.pb.ExecProcedureRequest) returns (.hbase.pb.ExecProcedureResponse); - * *
      **
      * Execute a distributed procedure with return data.
      * 
+ * + * rpc ExecProcedureWithRet(.hbase.pb.ExecProcedureRequest) returns (.hbase.pb.ExecProcedureResponse); */ public abstract void execProcedureWithRet( com.google.protobuf.RpcController controller, @@ -62662,12 +66220,12 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc IsProcedureDone(.hbase.pb.IsProcedureDoneRequest) returns (.hbase.pb.IsProcedureDoneResponse); - * *
      **
      * Determine if the procedure is done yet.
      * 
+ * + * rpc IsProcedureDone(.hbase.pb.IsProcedureDoneRequest) returns (.hbase.pb.IsProcedureDoneResponse); */ public abstract void isProcedureDone( com.google.protobuf.RpcController controller, @@ -62675,11 +66233,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc ModifyNamespace(.hbase.pb.ModifyNamespaceRequest) returns (.hbase.pb.ModifyNamespaceResponse); - * *
      ** Modify a namespace's metadata 
      * 
+ * + * rpc ModifyNamespace(.hbase.pb.ModifyNamespaceRequest) returns (.hbase.pb.ModifyNamespaceResponse); */ public abstract void modifyNamespace( com.google.protobuf.RpcController controller, @@ -62687,11 +66245,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc CreateNamespace(.hbase.pb.CreateNamespaceRequest) returns (.hbase.pb.CreateNamespaceResponse); - * *
      ** Creates a new namespace synchronously 
      * 
+ * + * rpc CreateNamespace(.hbase.pb.CreateNamespaceRequest) returns (.hbase.pb.CreateNamespaceResponse); */ public abstract void createNamespace( com.google.protobuf.RpcController controller, @@ -62699,11 +66257,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc DeleteNamespace(.hbase.pb.DeleteNamespaceRequest) returns (.hbase.pb.DeleteNamespaceResponse); - * *
      ** Deletes namespace synchronously 
      * 
+ * + * rpc DeleteNamespace(.hbase.pb.DeleteNamespaceRequest) returns (.hbase.pb.DeleteNamespaceResponse); */ public abstract void deleteNamespace( com.google.protobuf.RpcController controller, @@ -62711,11 +66269,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc GetNamespaceDescriptor(.hbase.pb.GetNamespaceDescriptorRequest) returns (.hbase.pb.GetNamespaceDescriptorResponse); - * *
      ** Get a namespace descriptor by name 
      * 
+ * + * rpc GetNamespaceDescriptor(.hbase.pb.GetNamespaceDescriptorRequest) returns (.hbase.pb.GetNamespaceDescriptorResponse); */ public abstract void getNamespaceDescriptor( com.google.protobuf.RpcController controller, @@ -62723,11 +66281,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc ListNamespaceDescriptors(.hbase.pb.ListNamespaceDescriptorsRequest) returns (.hbase.pb.ListNamespaceDescriptorsResponse); - * *
      ** returns a list of namespaces 
      * 
+ * + * rpc ListNamespaceDescriptors(.hbase.pb.ListNamespaceDescriptorsRequest) returns (.hbase.pb.ListNamespaceDescriptorsResponse); */ public abstract void listNamespaceDescriptors( com.google.protobuf.RpcController controller, @@ -62735,11 +66293,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc ListTableDescriptorsByNamespace(.hbase.pb.ListTableDescriptorsByNamespaceRequest) returns (.hbase.pb.ListTableDescriptorsByNamespaceResponse); - * *
      ** returns a list of tables for a given namespace
      * 
+ * + * rpc ListTableDescriptorsByNamespace(.hbase.pb.ListTableDescriptorsByNamespaceRequest) returns (.hbase.pb.ListTableDescriptorsByNamespaceResponse); */ public abstract void listTableDescriptorsByNamespace( com.google.protobuf.RpcController controller, @@ -62747,11 +66305,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc ListTableNamesByNamespace(.hbase.pb.ListTableNamesByNamespaceRequest) returns (.hbase.pb.ListTableNamesByNamespaceResponse); - * *
      ** returns a list of tables for a given namespace
      * 
+ * + * rpc ListTableNamesByNamespace(.hbase.pb.ListTableNamesByNamespaceRequest) returns (.hbase.pb.ListTableNamesByNamespaceResponse); */ public abstract void listTableNamesByNamespace( com.google.protobuf.RpcController controller, @@ -62759,11 +66317,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc GetTableState(.hbase.pb.GetTableStateRequest) returns (.hbase.pb.GetTableStateResponse); - * *
      ** returns table state 
      * 
+ * + * rpc GetTableState(.hbase.pb.GetTableStateRequest) returns (.hbase.pb.GetTableStateResponse); */ public abstract void getTableState( com.google.protobuf.RpcController controller, @@ -62771,11 +66329,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc SetQuota(.hbase.pb.SetQuotaRequest) returns (.hbase.pb.SetQuotaResponse); - * *
      ** Apply the new quota settings 
      * 
+ * + * rpc SetQuota(.hbase.pb.SetQuotaRequest) returns (.hbase.pb.SetQuotaResponse); */ public abstract void setQuota( com.google.protobuf.RpcController controller, @@ -62783,11 +66341,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc getLastMajorCompactionTimestamp(.hbase.pb.MajorCompactionTimestampRequest) returns (.hbase.pb.MajorCompactionTimestampResponse); - * *
      ** Returns the timestamp of the last major compaction 
      * 
+ * + * rpc getLastMajorCompactionTimestamp(.hbase.pb.MajorCompactionTimestampRequest) returns (.hbase.pb.MajorCompactionTimestampResponse); */ public abstract void getLastMajorCompactionTimestamp( com.google.protobuf.RpcController controller, @@ -62795,11 +66353,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc getLastMajorCompactionTimestampForRegion(.hbase.pb.MajorCompactionTimestampForRegionRequest) returns (.hbase.pb.MajorCompactionTimestampResponse); - * *
      ** Returns the timestamp of the last major compaction 
      * 
+ * + * rpc getLastMajorCompactionTimestampForRegion(.hbase.pb.MajorCompactionTimestampForRegionRequest) returns (.hbase.pb.MajorCompactionTimestampResponse); */ public abstract void getLastMajorCompactionTimestampForRegion( com.google.protobuf.RpcController controller, @@ -62815,11 +66373,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc getSecurityCapabilities(.hbase.pb.SecurityCapabilitiesRequest) returns (.hbase.pb.SecurityCapabilitiesResponse); - * *
      ** Returns the security capabilities in effect on the cluster 
      * 
+ * + * rpc getSecurityCapabilities(.hbase.pb.SecurityCapabilitiesRequest) returns (.hbase.pb.SecurityCapabilitiesResponse); */ public abstract void getSecurityCapabilities( com.google.protobuf.RpcController controller, @@ -62827,11 +66385,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc AbortProcedure(.hbase.pb.AbortProcedureRequest) returns (.hbase.pb.AbortProcedureResponse); - * *
      ** Abort a procedure 
      * 
+ * + * rpc AbortProcedure(.hbase.pb.AbortProcedureRequest) returns (.hbase.pb.AbortProcedureResponse); */ public abstract void abortProcedure( com.google.protobuf.RpcController controller, @@ -62839,11 +66397,11 @@ public final class MasterProtos { com.google.protobuf.RpcCallback done); /** - * rpc ListProcedures(.hbase.pb.ListProceduresRequest) returns (.hbase.pb.ListProceduresResponse); - * *
      ** returns a list of procedures 
      * 
+ * + * rpc ListProcedures(.hbase.pb.ListProceduresRequest) returns (.hbase.pb.ListProceduresResponse); */ public abstract void listProcedures( com.google.protobuf.RpcController controller, @@ -65277,567 +68835,567 @@ public final class MasterProtos { // @@protoc_insertion_point(class_scope:hbase.pb.MasterService) } - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_AddColumnRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_AddColumnRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_AddColumnResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_AddColumnResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_DeleteColumnRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_DeleteColumnRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_DeleteColumnResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_DeleteColumnResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + 
private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ModifyColumnRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ModifyColumnRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ModifyColumnResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ModifyColumnResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_MoveRegionRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_MoveRegionRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_MoveRegionResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_MoveRegionResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_DispatchMergingRegionsRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_DispatchMergingRegionsRequest_fieldAccessorTable; - private 
static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_DispatchMergingRegionsResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_DispatchMergingRegionsResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_AssignRegionRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_AssignRegionRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_AssignRegionResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_AssignRegionResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_UnassignRegionRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_UnassignRegionRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_UnassignRegionResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internal_static_hbase_pb_UnassignRegionResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_OfflineRegionRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_OfflineRegionRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_OfflineRegionResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_OfflineRegionResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_CreateTableRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_CreateTableRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_CreateTableResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_CreateTableResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_DeleteTableRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_DeleteTableRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_DeleteTableResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_DeleteTableResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_TruncateTableRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_TruncateTableRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_TruncateTableResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_TruncateTableResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_EnableTableRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_EnableTableRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_EnableTableResponse_descriptor; - private static - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_EnableTableResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_DisableTableRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_DisableTableRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_DisableTableResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_DisableTableResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ModifyTableRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ModifyTableRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ModifyTableResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ModifyTableResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor 
internal_static_hbase_pb_CreateNamespaceRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_CreateNamespaceRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_CreateNamespaceResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_CreateNamespaceResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_DeleteNamespaceRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_DeleteNamespaceRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_DeleteNamespaceResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_DeleteNamespaceResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ModifyNamespaceRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ModifyNamespaceRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + 
private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ModifyNamespaceResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ModifyNamespaceResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_GetNamespaceDescriptorRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_GetNamespaceDescriptorRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_GetNamespaceDescriptorResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_GetNamespaceDescriptorResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ListNamespaceDescriptorsRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ListNamespaceDescriptorsRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ListNamespaceDescriptorsResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internal_static_hbase_pb_ListNamespaceDescriptorsResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ListTableDescriptorsByNamespaceRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ListTableDescriptorsByNamespaceRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ListTableDescriptorsByNamespaceResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ListTableDescriptorsByNamespaceResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ListTableNamesByNamespaceRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ListTableNamesByNamespaceRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ListTableNamesByNamespaceResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ListTableNamesByNamespaceResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor 
internal_static_hbase_pb_ShutdownRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ShutdownRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ShutdownResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ShutdownResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_StopMasterRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_StopMasterRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_StopMasterResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_StopMasterResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_IsInMaintenanceModeRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_IsInMaintenanceModeRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final 
com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_IsInMaintenanceModeResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_IsInMaintenanceModeResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_BalanceRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_BalanceRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_BalanceResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_BalanceResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SetBalancerRunningRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SetBalancerRunningRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SetBalancerRunningResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SetBalancerRunningResponse_fieldAccessorTable; - private static 
com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_IsBalancerEnabledRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_IsBalancerEnabledRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_IsBalancerEnabledResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_IsBalancerEnabledResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SetSplitOrMergeEnabledRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SetSplitOrMergeEnabledRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SetSplitOrMergeEnabledResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SetSplitOrMergeEnabledResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_IsSplitOrMergeEnabledRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_IsSplitOrMergeEnabledRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_IsSplitOrMergeEnabledResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_IsSplitOrMergeEnabledResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_NormalizeRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_NormalizeRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_NormalizeResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_NormalizeResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SetNormalizerRunningRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SetNormalizerRunningRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SetNormalizerRunningResponse_descriptor; - private static - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SetNormalizerRunningResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_IsNormalizerEnabledRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_IsNormalizerEnabledRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_IsNormalizerEnabledResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_IsNormalizerEnabledResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RunCatalogScanRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RunCatalogScanRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RunCatalogScanResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RunCatalogScanResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final 
com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_EnableCatalogJanitorRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_EnableCatalogJanitorRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_EnableCatalogJanitorResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_EnableCatalogJanitorResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_IsCatalogJanitorEnabledRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_IsCatalogJanitorEnabledRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_IsCatalogJanitorEnabledResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_IsCatalogJanitorEnabledResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SnapshotRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internal_static_hbase_pb_SnapshotRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SnapshotResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SnapshotResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_GetCompletedSnapshotsRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_GetCompletedSnapshotsRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_GetCompletedSnapshotsResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_GetCompletedSnapshotsResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_DeleteSnapshotRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_DeleteSnapshotRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_DeleteSnapshotResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static 
final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_DeleteSnapshotResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RestoreSnapshotRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RestoreSnapshotRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RestoreSnapshotResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RestoreSnapshotResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_IsSnapshotDoneRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_IsSnapshotDoneRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_IsSnapshotDoneResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_IsSnapshotDoneResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_IsRestoreSnapshotDoneRequest_descriptor; - private static - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_IsRestoreSnapshotDoneRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_IsRestoreSnapshotDoneResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_IsRestoreSnapshotDoneResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_GetSchemaAlterStatusRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_GetSchemaAlterStatusRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_GetSchemaAlterStatusResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_GetSchemaAlterStatusResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_GetTableDescriptorsRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_GetTableDescriptorsRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final 
com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_GetTableDescriptorsResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_GetTableDescriptorsResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_GetTableNamesRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_GetTableNamesRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_GetTableNamesResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_GetTableNamesResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_GetTableStateRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_GetTableStateRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_GetTableStateResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_GetTableStateResponse_fieldAccessorTable; - private static 
com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_GetClusterStatusRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_GetClusterStatusRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_GetClusterStatusResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_GetClusterStatusResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_IsMasterRunningRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_IsMasterRunningRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_IsMasterRunningResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_IsMasterRunningResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ExecProcedureRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internal_static_hbase_pb_ExecProcedureRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ExecProcedureResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ExecProcedureResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_IsProcedureDoneRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_IsProcedureDoneRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_IsProcedureDoneResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_IsProcedureDoneResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_GetProcedureResultRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_GetProcedureResultRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_GetProcedureResultResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private 
static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_GetProcedureResultResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_AbortProcedureRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_AbortProcedureRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_AbortProcedureResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_AbortProcedureResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ListProceduresRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ListProceduresRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ListProceduresResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ListProceduresResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SetQuotaRequest_descriptor; - private static - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SetQuotaRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SetQuotaResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SetQuotaResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_MajorCompactionTimestampRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_MajorCompactionTimestampRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_MajorCompactionTimestampForRegionRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_MajorCompactionTimestampForRegionRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_MajorCompactionTimestampResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_MajorCompactionTimestampResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final 
com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SecurityCapabilitiesRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SecurityCapabilitiesRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SecurityCapabilitiesResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SecurityCapabilitiesResponse_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { @@ -66173,679 +69731,13 @@ public final class MasterProtos { "B\014MasterProtosH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_hbase_pb_AddColumnRequest_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_hbase_pb_AddColumnRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_AddColumnRequest_descriptor, - new java.lang.String[] { "TableName", "ColumnFamilies", "NonceGroup", "Nonce", }); - internal_static_hbase_pb_AddColumnResponse_descriptor = - getDescriptor().getMessageTypes().get(1); - 
internal_static_hbase_pb_AddColumnResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_AddColumnResponse_descriptor, - new java.lang.String[] { "ProcId", }); - internal_static_hbase_pb_DeleteColumnRequest_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_hbase_pb_DeleteColumnRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_DeleteColumnRequest_descriptor, - new java.lang.String[] { "TableName", "ColumnName", "NonceGroup", "Nonce", }); - internal_static_hbase_pb_DeleteColumnResponse_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_hbase_pb_DeleteColumnResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_DeleteColumnResponse_descriptor, - new java.lang.String[] { "ProcId", }); - internal_static_hbase_pb_ModifyColumnRequest_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_hbase_pb_ModifyColumnRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ModifyColumnRequest_descriptor, - new java.lang.String[] { "TableName", "ColumnFamilies", "NonceGroup", "Nonce", }); - internal_static_hbase_pb_ModifyColumnResponse_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_hbase_pb_ModifyColumnResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ModifyColumnResponse_descriptor, - new java.lang.String[] { "ProcId", }); - internal_static_hbase_pb_MoveRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_hbase_pb_MoveRegionRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_MoveRegionRequest_descriptor, - new java.lang.String[] { "Region", "DestServerName", 
}); - internal_static_hbase_pb_MoveRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(7); - internal_static_hbase_pb_MoveRegionResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_MoveRegionResponse_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_DispatchMergingRegionsRequest_descriptor = - getDescriptor().getMessageTypes().get(8); - internal_static_hbase_pb_DispatchMergingRegionsRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_DispatchMergingRegionsRequest_descriptor, - new java.lang.String[] { "RegionA", "RegionB", "Forcible", "NonceGroup", "Nonce", }); - internal_static_hbase_pb_DispatchMergingRegionsResponse_descriptor = - getDescriptor().getMessageTypes().get(9); - internal_static_hbase_pb_DispatchMergingRegionsResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_DispatchMergingRegionsResponse_descriptor, - new java.lang.String[] { "ProcId", }); - internal_static_hbase_pb_AssignRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(10); - internal_static_hbase_pb_AssignRegionRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_AssignRegionRequest_descriptor, - new java.lang.String[] { "Region", }); - internal_static_hbase_pb_AssignRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(11); - internal_static_hbase_pb_AssignRegionResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_AssignRegionResponse_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_UnassignRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(12); - internal_static_hbase_pb_UnassignRegionRequest_fieldAccessorTable = new - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_UnassignRegionRequest_descriptor, - new java.lang.String[] { "Region", "Force", }); - internal_static_hbase_pb_UnassignRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(13); - internal_static_hbase_pb_UnassignRegionResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_UnassignRegionResponse_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_OfflineRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(14); - internal_static_hbase_pb_OfflineRegionRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_OfflineRegionRequest_descriptor, - new java.lang.String[] { "Region", }); - internal_static_hbase_pb_OfflineRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(15); - internal_static_hbase_pb_OfflineRegionResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_OfflineRegionResponse_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_CreateTableRequest_descriptor = - getDescriptor().getMessageTypes().get(16); - internal_static_hbase_pb_CreateTableRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_CreateTableRequest_descriptor, - new java.lang.String[] { "TableSchema", "SplitKeys", "NonceGroup", "Nonce", }); - internal_static_hbase_pb_CreateTableResponse_descriptor = - getDescriptor().getMessageTypes().get(17); - internal_static_hbase_pb_CreateTableResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_CreateTableResponse_descriptor, - new java.lang.String[] { "ProcId", }); - internal_static_hbase_pb_DeleteTableRequest_descriptor = - getDescriptor().getMessageTypes().get(18); - 
internal_static_hbase_pb_DeleteTableRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_DeleteTableRequest_descriptor, - new java.lang.String[] { "TableName", "NonceGroup", "Nonce", }); - internal_static_hbase_pb_DeleteTableResponse_descriptor = - getDescriptor().getMessageTypes().get(19); - internal_static_hbase_pb_DeleteTableResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_DeleteTableResponse_descriptor, - new java.lang.String[] { "ProcId", }); - internal_static_hbase_pb_TruncateTableRequest_descriptor = - getDescriptor().getMessageTypes().get(20); - internal_static_hbase_pb_TruncateTableRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_TruncateTableRequest_descriptor, - new java.lang.String[] { "TableName", "PreserveSplits", "NonceGroup", "Nonce", }); - internal_static_hbase_pb_TruncateTableResponse_descriptor = - getDescriptor().getMessageTypes().get(21); - internal_static_hbase_pb_TruncateTableResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_TruncateTableResponse_descriptor, - new java.lang.String[] { "ProcId", }); - internal_static_hbase_pb_EnableTableRequest_descriptor = - getDescriptor().getMessageTypes().get(22); - internal_static_hbase_pb_EnableTableRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_EnableTableRequest_descriptor, - new java.lang.String[] { "TableName", "NonceGroup", "Nonce", }); - internal_static_hbase_pb_EnableTableResponse_descriptor = - getDescriptor().getMessageTypes().get(23); - internal_static_hbase_pb_EnableTableResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_EnableTableResponse_descriptor, - new java.lang.String[] { 
"ProcId", }); - internal_static_hbase_pb_DisableTableRequest_descriptor = - getDescriptor().getMessageTypes().get(24); - internal_static_hbase_pb_DisableTableRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_DisableTableRequest_descriptor, - new java.lang.String[] { "TableName", "NonceGroup", "Nonce", }); - internal_static_hbase_pb_DisableTableResponse_descriptor = - getDescriptor().getMessageTypes().get(25); - internal_static_hbase_pb_DisableTableResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_DisableTableResponse_descriptor, - new java.lang.String[] { "ProcId", }); - internal_static_hbase_pb_ModifyTableRequest_descriptor = - getDescriptor().getMessageTypes().get(26); - internal_static_hbase_pb_ModifyTableRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ModifyTableRequest_descriptor, - new java.lang.String[] { "TableName", "TableSchema", "NonceGroup", "Nonce", }); - internal_static_hbase_pb_ModifyTableResponse_descriptor = - getDescriptor().getMessageTypes().get(27); - internal_static_hbase_pb_ModifyTableResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ModifyTableResponse_descriptor, - new java.lang.String[] { "ProcId", }); - internal_static_hbase_pb_CreateNamespaceRequest_descriptor = - getDescriptor().getMessageTypes().get(28); - internal_static_hbase_pb_CreateNamespaceRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_CreateNamespaceRequest_descriptor, - new java.lang.String[] { "NamespaceDescriptor", "NonceGroup", "Nonce", }); - internal_static_hbase_pb_CreateNamespaceResponse_descriptor = - getDescriptor().getMessageTypes().get(29); - internal_static_hbase_pb_CreateNamespaceResponse_fieldAccessorTable = new - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_CreateNamespaceResponse_descriptor, - new java.lang.String[] { "ProcId", }); - internal_static_hbase_pb_DeleteNamespaceRequest_descriptor = - getDescriptor().getMessageTypes().get(30); - internal_static_hbase_pb_DeleteNamespaceRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_DeleteNamespaceRequest_descriptor, - new java.lang.String[] { "NamespaceName", "NonceGroup", "Nonce", }); - internal_static_hbase_pb_DeleteNamespaceResponse_descriptor = - getDescriptor().getMessageTypes().get(31); - internal_static_hbase_pb_DeleteNamespaceResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_DeleteNamespaceResponse_descriptor, - new java.lang.String[] { "ProcId", }); - internal_static_hbase_pb_ModifyNamespaceRequest_descriptor = - getDescriptor().getMessageTypes().get(32); - internal_static_hbase_pb_ModifyNamespaceRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ModifyNamespaceRequest_descriptor, - new java.lang.String[] { "NamespaceDescriptor", "NonceGroup", "Nonce", }); - internal_static_hbase_pb_ModifyNamespaceResponse_descriptor = - getDescriptor().getMessageTypes().get(33); - internal_static_hbase_pb_ModifyNamespaceResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ModifyNamespaceResponse_descriptor, - new java.lang.String[] { "ProcId", }); - internal_static_hbase_pb_GetNamespaceDescriptorRequest_descriptor = - getDescriptor().getMessageTypes().get(34); - internal_static_hbase_pb_GetNamespaceDescriptorRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_GetNamespaceDescriptorRequest_descriptor, - new java.lang.String[] { "NamespaceName", }); - 
internal_static_hbase_pb_GetNamespaceDescriptorResponse_descriptor = - getDescriptor().getMessageTypes().get(35); - internal_static_hbase_pb_GetNamespaceDescriptorResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_GetNamespaceDescriptorResponse_descriptor, - new java.lang.String[] { "NamespaceDescriptor", }); - internal_static_hbase_pb_ListNamespaceDescriptorsRequest_descriptor = - getDescriptor().getMessageTypes().get(36); - internal_static_hbase_pb_ListNamespaceDescriptorsRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ListNamespaceDescriptorsRequest_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_ListNamespaceDescriptorsResponse_descriptor = - getDescriptor().getMessageTypes().get(37); - internal_static_hbase_pb_ListNamespaceDescriptorsResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ListNamespaceDescriptorsResponse_descriptor, - new java.lang.String[] { "NamespaceDescriptor", }); - internal_static_hbase_pb_ListTableDescriptorsByNamespaceRequest_descriptor = - getDescriptor().getMessageTypes().get(38); - internal_static_hbase_pb_ListTableDescriptorsByNamespaceRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ListTableDescriptorsByNamespaceRequest_descriptor, - new java.lang.String[] { "NamespaceName", }); - internal_static_hbase_pb_ListTableDescriptorsByNamespaceResponse_descriptor = - getDescriptor().getMessageTypes().get(39); - internal_static_hbase_pb_ListTableDescriptorsByNamespaceResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ListTableDescriptorsByNamespaceResponse_descriptor, - new java.lang.String[] { "TableSchema", }); - 
internal_static_hbase_pb_ListTableNamesByNamespaceRequest_descriptor = - getDescriptor().getMessageTypes().get(40); - internal_static_hbase_pb_ListTableNamesByNamespaceRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ListTableNamesByNamespaceRequest_descriptor, - new java.lang.String[] { "NamespaceName", }); - internal_static_hbase_pb_ListTableNamesByNamespaceResponse_descriptor = - getDescriptor().getMessageTypes().get(41); - internal_static_hbase_pb_ListTableNamesByNamespaceResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ListTableNamesByNamespaceResponse_descriptor, - new java.lang.String[] { "TableName", }); - internal_static_hbase_pb_ShutdownRequest_descriptor = - getDescriptor().getMessageTypes().get(42); - internal_static_hbase_pb_ShutdownRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ShutdownRequest_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_ShutdownResponse_descriptor = - getDescriptor().getMessageTypes().get(43); - internal_static_hbase_pb_ShutdownResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ShutdownResponse_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_StopMasterRequest_descriptor = - getDescriptor().getMessageTypes().get(44); - internal_static_hbase_pb_StopMasterRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_StopMasterRequest_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_StopMasterResponse_descriptor = - getDescriptor().getMessageTypes().get(45); - internal_static_hbase_pb_StopMasterResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - 
internal_static_hbase_pb_StopMasterResponse_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_IsInMaintenanceModeRequest_descriptor = - getDescriptor().getMessageTypes().get(46); - internal_static_hbase_pb_IsInMaintenanceModeRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_IsInMaintenanceModeRequest_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_IsInMaintenanceModeResponse_descriptor = - getDescriptor().getMessageTypes().get(47); - internal_static_hbase_pb_IsInMaintenanceModeResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_IsInMaintenanceModeResponse_descriptor, - new java.lang.String[] { "InMaintenanceMode", }); - internal_static_hbase_pb_BalanceRequest_descriptor = - getDescriptor().getMessageTypes().get(48); - internal_static_hbase_pb_BalanceRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_BalanceRequest_descriptor, - new java.lang.String[] { "Force", }); - internal_static_hbase_pb_BalanceResponse_descriptor = - getDescriptor().getMessageTypes().get(49); - internal_static_hbase_pb_BalanceResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_BalanceResponse_descriptor, - new java.lang.String[] { "BalancerRan", }); - internal_static_hbase_pb_SetBalancerRunningRequest_descriptor = - getDescriptor().getMessageTypes().get(50); - internal_static_hbase_pb_SetBalancerRunningRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_SetBalancerRunningRequest_descriptor, - new java.lang.String[] { "On", "Synchronous", }); - internal_static_hbase_pb_SetBalancerRunningResponse_descriptor = - getDescriptor().getMessageTypes().get(51); - 
internal_static_hbase_pb_SetBalancerRunningResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_SetBalancerRunningResponse_descriptor, - new java.lang.String[] { "PrevBalanceValue", }); - internal_static_hbase_pb_IsBalancerEnabledRequest_descriptor = - getDescriptor().getMessageTypes().get(52); - internal_static_hbase_pb_IsBalancerEnabledRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_IsBalancerEnabledRequest_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_IsBalancerEnabledResponse_descriptor = - getDescriptor().getMessageTypes().get(53); - internal_static_hbase_pb_IsBalancerEnabledResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_IsBalancerEnabledResponse_descriptor, - new java.lang.String[] { "Enabled", }); - internal_static_hbase_pb_SetSplitOrMergeEnabledRequest_descriptor = - getDescriptor().getMessageTypes().get(54); - internal_static_hbase_pb_SetSplitOrMergeEnabledRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_SetSplitOrMergeEnabledRequest_descriptor, - new java.lang.String[] { "Enabled", "Synchronous", "SwitchTypes", }); - internal_static_hbase_pb_SetSplitOrMergeEnabledResponse_descriptor = - getDescriptor().getMessageTypes().get(55); - internal_static_hbase_pb_SetSplitOrMergeEnabledResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_SetSplitOrMergeEnabledResponse_descriptor, - new java.lang.String[] { "PrevValue", }); - internal_static_hbase_pb_IsSplitOrMergeEnabledRequest_descriptor = - getDescriptor().getMessageTypes().get(56); - internal_static_hbase_pb_IsSplitOrMergeEnabledRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - 
internal_static_hbase_pb_IsSplitOrMergeEnabledRequest_descriptor, - new java.lang.String[] { "SwitchType", }); - internal_static_hbase_pb_IsSplitOrMergeEnabledResponse_descriptor = - getDescriptor().getMessageTypes().get(57); - internal_static_hbase_pb_IsSplitOrMergeEnabledResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_IsSplitOrMergeEnabledResponse_descriptor, - new java.lang.String[] { "Enabled", }); - internal_static_hbase_pb_NormalizeRequest_descriptor = - getDescriptor().getMessageTypes().get(58); - internal_static_hbase_pb_NormalizeRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_NormalizeRequest_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_NormalizeResponse_descriptor = - getDescriptor().getMessageTypes().get(59); - internal_static_hbase_pb_NormalizeResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_NormalizeResponse_descriptor, - new java.lang.String[] { "NormalizerRan", }); - internal_static_hbase_pb_SetNormalizerRunningRequest_descriptor = - getDescriptor().getMessageTypes().get(60); - internal_static_hbase_pb_SetNormalizerRunningRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_SetNormalizerRunningRequest_descriptor, - new java.lang.String[] { "On", }); - internal_static_hbase_pb_SetNormalizerRunningResponse_descriptor = - getDescriptor().getMessageTypes().get(61); - internal_static_hbase_pb_SetNormalizerRunningResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_SetNormalizerRunningResponse_descriptor, - new java.lang.String[] { "PrevNormalizerValue", }); - internal_static_hbase_pb_IsNormalizerEnabledRequest_descriptor = - getDescriptor().getMessageTypes().get(62); - 
internal_static_hbase_pb_IsNormalizerEnabledRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_IsNormalizerEnabledRequest_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_IsNormalizerEnabledResponse_descriptor = - getDescriptor().getMessageTypes().get(63); - internal_static_hbase_pb_IsNormalizerEnabledResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_IsNormalizerEnabledResponse_descriptor, - new java.lang.String[] { "Enabled", }); - internal_static_hbase_pb_RunCatalogScanRequest_descriptor = - getDescriptor().getMessageTypes().get(64); - internal_static_hbase_pb_RunCatalogScanRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RunCatalogScanRequest_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_RunCatalogScanResponse_descriptor = - getDescriptor().getMessageTypes().get(65); - internal_static_hbase_pb_RunCatalogScanResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RunCatalogScanResponse_descriptor, - new java.lang.String[] { "ScanResult", }); - internal_static_hbase_pb_EnableCatalogJanitorRequest_descriptor = - getDescriptor().getMessageTypes().get(66); - internal_static_hbase_pb_EnableCatalogJanitorRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_EnableCatalogJanitorRequest_descriptor, - new java.lang.String[] { "Enable", }); - internal_static_hbase_pb_EnableCatalogJanitorResponse_descriptor = - getDescriptor().getMessageTypes().get(67); - internal_static_hbase_pb_EnableCatalogJanitorResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_EnableCatalogJanitorResponse_descriptor, - new java.lang.String[] { "PrevValue", 
}); - internal_static_hbase_pb_IsCatalogJanitorEnabledRequest_descriptor = - getDescriptor().getMessageTypes().get(68); - internal_static_hbase_pb_IsCatalogJanitorEnabledRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_IsCatalogJanitorEnabledRequest_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_IsCatalogJanitorEnabledResponse_descriptor = - getDescriptor().getMessageTypes().get(69); - internal_static_hbase_pb_IsCatalogJanitorEnabledResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_IsCatalogJanitorEnabledResponse_descriptor, - new java.lang.String[] { "Value", }); - internal_static_hbase_pb_SnapshotRequest_descriptor = - getDescriptor().getMessageTypes().get(70); - internal_static_hbase_pb_SnapshotRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_SnapshotRequest_descriptor, - new java.lang.String[] { "Snapshot", }); - internal_static_hbase_pb_SnapshotResponse_descriptor = - getDescriptor().getMessageTypes().get(71); - internal_static_hbase_pb_SnapshotResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_SnapshotResponse_descriptor, - new java.lang.String[] { "ExpectedTimeout", }); - internal_static_hbase_pb_GetCompletedSnapshotsRequest_descriptor = - getDescriptor().getMessageTypes().get(72); - internal_static_hbase_pb_GetCompletedSnapshotsRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_GetCompletedSnapshotsRequest_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_GetCompletedSnapshotsResponse_descriptor = - getDescriptor().getMessageTypes().get(73); - internal_static_hbase_pb_GetCompletedSnapshotsResponse_fieldAccessorTable = new - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_GetCompletedSnapshotsResponse_descriptor, - new java.lang.String[] { "Snapshots", }); - internal_static_hbase_pb_DeleteSnapshotRequest_descriptor = - getDescriptor().getMessageTypes().get(74); - internal_static_hbase_pb_DeleteSnapshotRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_DeleteSnapshotRequest_descriptor, - new java.lang.String[] { "Snapshot", }); - internal_static_hbase_pb_DeleteSnapshotResponse_descriptor = - getDescriptor().getMessageTypes().get(75); - internal_static_hbase_pb_DeleteSnapshotResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_DeleteSnapshotResponse_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_RestoreSnapshotRequest_descriptor = - getDescriptor().getMessageTypes().get(76); - internal_static_hbase_pb_RestoreSnapshotRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RestoreSnapshotRequest_descriptor, - new java.lang.String[] { "Snapshot", "NonceGroup", "Nonce", }); - internal_static_hbase_pb_RestoreSnapshotResponse_descriptor = - getDescriptor().getMessageTypes().get(77); - internal_static_hbase_pb_RestoreSnapshotResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RestoreSnapshotResponse_descriptor, - new java.lang.String[] { "ProcId", }); - internal_static_hbase_pb_IsSnapshotDoneRequest_descriptor = - getDescriptor().getMessageTypes().get(78); - internal_static_hbase_pb_IsSnapshotDoneRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_IsSnapshotDoneRequest_descriptor, - new java.lang.String[] { "Snapshot", }); - internal_static_hbase_pb_IsSnapshotDoneResponse_descriptor = - 
getDescriptor().getMessageTypes().get(79); - internal_static_hbase_pb_IsSnapshotDoneResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_IsSnapshotDoneResponse_descriptor, - new java.lang.String[] { "Done", "Snapshot", }); - internal_static_hbase_pb_IsRestoreSnapshotDoneRequest_descriptor = - getDescriptor().getMessageTypes().get(80); - internal_static_hbase_pb_IsRestoreSnapshotDoneRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_IsRestoreSnapshotDoneRequest_descriptor, - new java.lang.String[] { "Snapshot", }); - internal_static_hbase_pb_IsRestoreSnapshotDoneResponse_descriptor = - getDescriptor().getMessageTypes().get(81); - internal_static_hbase_pb_IsRestoreSnapshotDoneResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_IsRestoreSnapshotDoneResponse_descriptor, - new java.lang.String[] { "Done", }); - internal_static_hbase_pb_GetSchemaAlterStatusRequest_descriptor = - getDescriptor().getMessageTypes().get(82); - internal_static_hbase_pb_GetSchemaAlterStatusRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_GetSchemaAlterStatusRequest_descriptor, - new java.lang.String[] { "TableName", }); - internal_static_hbase_pb_GetSchemaAlterStatusResponse_descriptor = - getDescriptor().getMessageTypes().get(83); - internal_static_hbase_pb_GetSchemaAlterStatusResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_GetSchemaAlterStatusResponse_descriptor, - new java.lang.String[] { "YetToUpdateRegions", "TotalRegions", }); - internal_static_hbase_pb_GetTableDescriptorsRequest_descriptor = - getDescriptor().getMessageTypes().get(84); - internal_static_hbase_pb_GetTableDescriptorsRequest_fieldAccessorTable = new - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_GetTableDescriptorsRequest_descriptor, - new java.lang.String[] { "TableNames", "Regex", "IncludeSysTables", "Namespace", }); - internal_static_hbase_pb_GetTableDescriptorsResponse_descriptor = - getDescriptor().getMessageTypes().get(85); - internal_static_hbase_pb_GetTableDescriptorsResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_GetTableDescriptorsResponse_descriptor, - new java.lang.String[] { "TableSchema", }); - internal_static_hbase_pb_GetTableNamesRequest_descriptor = - getDescriptor().getMessageTypes().get(86); - internal_static_hbase_pb_GetTableNamesRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_GetTableNamesRequest_descriptor, - new java.lang.String[] { "Regex", "IncludeSysTables", "Namespace", }); - internal_static_hbase_pb_GetTableNamesResponse_descriptor = - getDescriptor().getMessageTypes().get(87); - internal_static_hbase_pb_GetTableNamesResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_GetTableNamesResponse_descriptor, - new java.lang.String[] { "TableNames", }); - internal_static_hbase_pb_GetTableStateRequest_descriptor = - getDescriptor().getMessageTypes().get(88); - internal_static_hbase_pb_GetTableStateRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_GetTableStateRequest_descriptor, - new java.lang.String[] { "TableName", }); - internal_static_hbase_pb_GetTableStateResponse_descriptor = - getDescriptor().getMessageTypes().get(89); - internal_static_hbase_pb_GetTableStateResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_GetTableStateResponse_descriptor, - new java.lang.String[] { "TableState", }); - 
internal_static_hbase_pb_GetClusterStatusRequest_descriptor = - getDescriptor().getMessageTypes().get(90); - internal_static_hbase_pb_GetClusterStatusRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_GetClusterStatusRequest_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_GetClusterStatusResponse_descriptor = - getDescriptor().getMessageTypes().get(91); - internal_static_hbase_pb_GetClusterStatusResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_GetClusterStatusResponse_descriptor, - new java.lang.String[] { "ClusterStatus", }); - internal_static_hbase_pb_IsMasterRunningRequest_descriptor = - getDescriptor().getMessageTypes().get(92); - internal_static_hbase_pb_IsMasterRunningRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_IsMasterRunningRequest_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_IsMasterRunningResponse_descriptor = - getDescriptor().getMessageTypes().get(93); - internal_static_hbase_pb_IsMasterRunningResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_IsMasterRunningResponse_descriptor, - new java.lang.String[] { "IsMasterRunning", }); - internal_static_hbase_pb_ExecProcedureRequest_descriptor = - getDescriptor().getMessageTypes().get(94); - internal_static_hbase_pb_ExecProcedureRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ExecProcedureRequest_descriptor, - new java.lang.String[] { "Procedure", }); - internal_static_hbase_pb_ExecProcedureResponse_descriptor = - getDescriptor().getMessageTypes().get(95); - internal_static_hbase_pb_ExecProcedureResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - 
internal_static_hbase_pb_ExecProcedureResponse_descriptor, - new java.lang.String[] { "ExpectedTimeout", "ReturnData", }); - internal_static_hbase_pb_IsProcedureDoneRequest_descriptor = - getDescriptor().getMessageTypes().get(96); - internal_static_hbase_pb_IsProcedureDoneRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_IsProcedureDoneRequest_descriptor, - new java.lang.String[] { "Procedure", }); - internal_static_hbase_pb_IsProcedureDoneResponse_descriptor = - getDescriptor().getMessageTypes().get(97); - internal_static_hbase_pb_IsProcedureDoneResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_IsProcedureDoneResponse_descriptor, - new java.lang.String[] { "Done", "Snapshot", }); - internal_static_hbase_pb_GetProcedureResultRequest_descriptor = - getDescriptor().getMessageTypes().get(98); - internal_static_hbase_pb_GetProcedureResultRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_GetProcedureResultRequest_descriptor, - new java.lang.String[] { "ProcId", }); - internal_static_hbase_pb_GetProcedureResultResponse_descriptor = - getDescriptor().getMessageTypes().get(99); - internal_static_hbase_pb_GetProcedureResultResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_GetProcedureResultResponse_descriptor, - new java.lang.String[] { "State", "StartTime", "LastUpdate", "Result", "Exception", }); - internal_static_hbase_pb_AbortProcedureRequest_descriptor = - getDescriptor().getMessageTypes().get(100); - internal_static_hbase_pb_AbortProcedureRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_AbortProcedureRequest_descriptor, - new java.lang.String[] { "ProcId", "MayInterruptIfRunning", }); - 
internal_static_hbase_pb_AbortProcedureResponse_descriptor = - getDescriptor().getMessageTypes().get(101); - internal_static_hbase_pb_AbortProcedureResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_AbortProcedureResponse_descriptor, - new java.lang.String[] { "IsProcedureAborted", }); - internal_static_hbase_pb_ListProceduresRequest_descriptor = - getDescriptor().getMessageTypes().get(102); - internal_static_hbase_pb_ListProceduresRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ListProceduresRequest_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_ListProceduresResponse_descriptor = - getDescriptor().getMessageTypes().get(103); - internal_static_hbase_pb_ListProceduresResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ListProceduresResponse_descriptor, - new java.lang.String[] { "Procedure", }); - internal_static_hbase_pb_SetQuotaRequest_descriptor = - getDescriptor().getMessageTypes().get(104); - internal_static_hbase_pb_SetQuotaRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_SetQuotaRequest_descriptor, - new java.lang.String[] { "UserName", "UserGroup", "Namespace", "TableName", "RemoveAll", "BypassGlobals", "Throttle", }); - internal_static_hbase_pb_SetQuotaResponse_descriptor = - getDescriptor().getMessageTypes().get(105); - internal_static_hbase_pb_SetQuotaResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_SetQuotaResponse_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_MajorCompactionTimestampRequest_descriptor = - getDescriptor().getMessageTypes().get(106); - internal_static_hbase_pb_MajorCompactionTimestampRequest_fieldAccessorTable = new - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_MajorCompactionTimestampRequest_descriptor, - new java.lang.String[] { "TableName", }); - internal_static_hbase_pb_MajorCompactionTimestampForRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(107); - internal_static_hbase_pb_MajorCompactionTimestampForRegionRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_MajorCompactionTimestampForRegionRequest_descriptor, - new java.lang.String[] { "Region", }); - internal_static_hbase_pb_MajorCompactionTimestampResponse_descriptor = - getDescriptor().getMessageTypes().get(108); - internal_static_hbase_pb_MajorCompactionTimestampResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_MajorCompactionTimestampResponse_descriptor, - new java.lang.String[] { "CompactionTimestamp", }); - internal_static_hbase_pb_SecurityCapabilitiesRequest_descriptor = - getDescriptor().getMessageTypes().get(109); - internal_static_hbase_pb_SecurityCapabilitiesRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_SecurityCapabilitiesRequest_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_SecurityCapabilitiesResponse_descriptor = - getDescriptor().getMessageTypes().get(110); - internal_static_hbase_pb_SecurityCapabilitiesResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_SecurityCapabilitiesResponse_descriptor, - new java.lang.String[] { "Capabilities", }); - return null; - } - }; + new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { @@ -66856,6 +69748,678 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.getDescriptor(), org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.getDescriptor(), }, assigner); + internal_static_hbase_pb_AddColumnRequest_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_hbase_pb_AddColumnRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_AddColumnRequest_descriptor, + new java.lang.String[] { "TableName", "ColumnFamilies", "NonceGroup", "Nonce", }); + internal_static_hbase_pb_AddColumnResponse_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_hbase_pb_AddColumnResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_AddColumnResponse_descriptor, + new java.lang.String[] { "ProcId", }); + internal_static_hbase_pb_DeleteColumnRequest_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_hbase_pb_DeleteColumnRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_DeleteColumnRequest_descriptor, + new java.lang.String[] { "TableName", "ColumnName", "NonceGroup", "Nonce", }); + internal_static_hbase_pb_DeleteColumnResponse_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_hbase_pb_DeleteColumnResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_DeleteColumnResponse_descriptor, + new java.lang.String[] { 
"ProcId", }); + internal_static_hbase_pb_ModifyColumnRequest_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_hbase_pb_ModifyColumnRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ModifyColumnRequest_descriptor, + new java.lang.String[] { "TableName", "ColumnFamilies", "NonceGroup", "Nonce", }); + internal_static_hbase_pb_ModifyColumnResponse_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_hbase_pb_ModifyColumnResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ModifyColumnResponse_descriptor, + new java.lang.String[] { "ProcId", }); + internal_static_hbase_pb_MoveRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_hbase_pb_MoveRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_MoveRegionRequest_descriptor, + new java.lang.String[] { "Region", "DestServerName", }); + internal_static_hbase_pb_MoveRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_hbase_pb_MoveRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_MoveRegionResponse_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_DispatchMergingRegionsRequest_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_hbase_pb_DispatchMergingRegionsRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_DispatchMergingRegionsRequest_descriptor, + new java.lang.String[] { "RegionA", "RegionB", "Forcible", "NonceGroup", "Nonce", }); + internal_static_hbase_pb_DispatchMergingRegionsResponse_descriptor = + getDescriptor().getMessageTypes().get(9); + 
internal_static_hbase_pb_DispatchMergingRegionsResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_DispatchMergingRegionsResponse_descriptor, + new java.lang.String[] { "ProcId", }); + internal_static_hbase_pb_AssignRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(10); + internal_static_hbase_pb_AssignRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_AssignRegionRequest_descriptor, + new java.lang.String[] { "Region", }); + internal_static_hbase_pb_AssignRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(11); + internal_static_hbase_pb_AssignRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_AssignRegionResponse_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_UnassignRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(12); + internal_static_hbase_pb_UnassignRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_UnassignRegionRequest_descriptor, + new java.lang.String[] { "Region", "Force", }); + internal_static_hbase_pb_UnassignRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(13); + internal_static_hbase_pb_UnassignRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_UnassignRegionResponse_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_OfflineRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(14); + internal_static_hbase_pb_OfflineRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_OfflineRegionRequest_descriptor, + new java.lang.String[] { "Region", }); + 
internal_static_hbase_pb_OfflineRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(15); + internal_static_hbase_pb_OfflineRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_OfflineRegionResponse_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_CreateTableRequest_descriptor = + getDescriptor().getMessageTypes().get(16); + internal_static_hbase_pb_CreateTableRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_CreateTableRequest_descriptor, + new java.lang.String[] { "TableSchema", "SplitKeys", "NonceGroup", "Nonce", }); + internal_static_hbase_pb_CreateTableResponse_descriptor = + getDescriptor().getMessageTypes().get(17); + internal_static_hbase_pb_CreateTableResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_CreateTableResponse_descriptor, + new java.lang.String[] { "ProcId", }); + internal_static_hbase_pb_DeleteTableRequest_descriptor = + getDescriptor().getMessageTypes().get(18); + internal_static_hbase_pb_DeleteTableRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_DeleteTableRequest_descriptor, + new java.lang.String[] { "TableName", "NonceGroup", "Nonce", }); + internal_static_hbase_pb_DeleteTableResponse_descriptor = + getDescriptor().getMessageTypes().get(19); + internal_static_hbase_pb_DeleteTableResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_DeleteTableResponse_descriptor, + new java.lang.String[] { "ProcId", }); + internal_static_hbase_pb_TruncateTableRequest_descriptor = + getDescriptor().getMessageTypes().get(20); + internal_static_hbase_pb_TruncateTableRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + 
internal_static_hbase_pb_TruncateTableRequest_descriptor, + new java.lang.String[] { "TableName", "PreserveSplits", "NonceGroup", "Nonce", }); + internal_static_hbase_pb_TruncateTableResponse_descriptor = + getDescriptor().getMessageTypes().get(21); + internal_static_hbase_pb_TruncateTableResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_TruncateTableResponse_descriptor, + new java.lang.String[] { "ProcId", }); + internal_static_hbase_pb_EnableTableRequest_descriptor = + getDescriptor().getMessageTypes().get(22); + internal_static_hbase_pb_EnableTableRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_EnableTableRequest_descriptor, + new java.lang.String[] { "TableName", "NonceGroup", "Nonce", }); + internal_static_hbase_pb_EnableTableResponse_descriptor = + getDescriptor().getMessageTypes().get(23); + internal_static_hbase_pb_EnableTableResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_EnableTableResponse_descriptor, + new java.lang.String[] { "ProcId", }); + internal_static_hbase_pb_DisableTableRequest_descriptor = + getDescriptor().getMessageTypes().get(24); + internal_static_hbase_pb_DisableTableRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_DisableTableRequest_descriptor, + new java.lang.String[] { "TableName", "NonceGroup", "Nonce", }); + internal_static_hbase_pb_DisableTableResponse_descriptor = + getDescriptor().getMessageTypes().get(25); + internal_static_hbase_pb_DisableTableResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_DisableTableResponse_descriptor, + new java.lang.String[] { "ProcId", }); + internal_static_hbase_pb_ModifyTableRequest_descriptor = + getDescriptor().getMessageTypes().get(26); + 
internal_static_hbase_pb_ModifyTableRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ModifyTableRequest_descriptor, + new java.lang.String[] { "TableName", "TableSchema", "NonceGroup", "Nonce", }); + internal_static_hbase_pb_ModifyTableResponse_descriptor = + getDescriptor().getMessageTypes().get(27); + internal_static_hbase_pb_ModifyTableResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ModifyTableResponse_descriptor, + new java.lang.String[] { "ProcId", }); + internal_static_hbase_pb_CreateNamespaceRequest_descriptor = + getDescriptor().getMessageTypes().get(28); + internal_static_hbase_pb_CreateNamespaceRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_CreateNamespaceRequest_descriptor, + new java.lang.String[] { "NamespaceDescriptor", "NonceGroup", "Nonce", }); + internal_static_hbase_pb_CreateNamespaceResponse_descriptor = + getDescriptor().getMessageTypes().get(29); + internal_static_hbase_pb_CreateNamespaceResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_CreateNamespaceResponse_descriptor, + new java.lang.String[] { "ProcId", }); + internal_static_hbase_pb_DeleteNamespaceRequest_descriptor = + getDescriptor().getMessageTypes().get(30); + internal_static_hbase_pb_DeleteNamespaceRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_DeleteNamespaceRequest_descriptor, + new java.lang.String[] { "NamespaceName", "NonceGroup", "Nonce", }); + internal_static_hbase_pb_DeleteNamespaceResponse_descriptor = + getDescriptor().getMessageTypes().get(31); + internal_static_hbase_pb_DeleteNamespaceResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + 
internal_static_hbase_pb_DeleteNamespaceResponse_descriptor, + new java.lang.String[] { "ProcId", }); + internal_static_hbase_pb_ModifyNamespaceRequest_descriptor = + getDescriptor().getMessageTypes().get(32); + internal_static_hbase_pb_ModifyNamespaceRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ModifyNamespaceRequest_descriptor, + new java.lang.String[] { "NamespaceDescriptor", "NonceGroup", "Nonce", }); + internal_static_hbase_pb_ModifyNamespaceResponse_descriptor = + getDescriptor().getMessageTypes().get(33); + internal_static_hbase_pb_ModifyNamespaceResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ModifyNamespaceResponse_descriptor, + new java.lang.String[] { "ProcId", }); + internal_static_hbase_pb_GetNamespaceDescriptorRequest_descriptor = + getDescriptor().getMessageTypes().get(34); + internal_static_hbase_pb_GetNamespaceDescriptorRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetNamespaceDescriptorRequest_descriptor, + new java.lang.String[] { "NamespaceName", }); + internal_static_hbase_pb_GetNamespaceDescriptorResponse_descriptor = + getDescriptor().getMessageTypes().get(35); + internal_static_hbase_pb_GetNamespaceDescriptorResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetNamespaceDescriptorResponse_descriptor, + new java.lang.String[] { "NamespaceDescriptor", }); + internal_static_hbase_pb_ListNamespaceDescriptorsRequest_descriptor = + getDescriptor().getMessageTypes().get(36); + internal_static_hbase_pb_ListNamespaceDescriptorsRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ListNamespaceDescriptorsRequest_descriptor, + new java.lang.String[] { }); + 
internal_static_hbase_pb_ListNamespaceDescriptorsResponse_descriptor = + getDescriptor().getMessageTypes().get(37); + internal_static_hbase_pb_ListNamespaceDescriptorsResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ListNamespaceDescriptorsResponse_descriptor, + new java.lang.String[] { "NamespaceDescriptor", }); + internal_static_hbase_pb_ListTableDescriptorsByNamespaceRequest_descriptor = + getDescriptor().getMessageTypes().get(38); + internal_static_hbase_pb_ListTableDescriptorsByNamespaceRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ListTableDescriptorsByNamespaceRequest_descriptor, + new java.lang.String[] { "NamespaceName", }); + internal_static_hbase_pb_ListTableDescriptorsByNamespaceResponse_descriptor = + getDescriptor().getMessageTypes().get(39); + internal_static_hbase_pb_ListTableDescriptorsByNamespaceResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ListTableDescriptorsByNamespaceResponse_descriptor, + new java.lang.String[] { "TableSchema", }); + internal_static_hbase_pb_ListTableNamesByNamespaceRequest_descriptor = + getDescriptor().getMessageTypes().get(40); + internal_static_hbase_pb_ListTableNamesByNamespaceRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ListTableNamesByNamespaceRequest_descriptor, + new java.lang.String[] { "NamespaceName", }); + internal_static_hbase_pb_ListTableNamesByNamespaceResponse_descriptor = + getDescriptor().getMessageTypes().get(41); + internal_static_hbase_pb_ListTableNamesByNamespaceResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ListTableNamesByNamespaceResponse_descriptor, + new java.lang.String[] { "TableName", }); + 
internal_static_hbase_pb_ShutdownRequest_descriptor = + getDescriptor().getMessageTypes().get(42); + internal_static_hbase_pb_ShutdownRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ShutdownRequest_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_ShutdownResponse_descriptor = + getDescriptor().getMessageTypes().get(43); + internal_static_hbase_pb_ShutdownResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ShutdownResponse_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_StopMasterRequest_descriptor = + getDescriptor().getMessageTypes().get(44); + internal_static_hbase_pb_StopMasterRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_StopMasterRequest_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_StopMasterResponse_descriptor = + getDescriptor().getMessageTypes().get(45); + internal_static_hbase_pb_StopMasterResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_StopMasterResponse_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_IsInMaintenanceModeRequest_descriptor = + getDescriptor().getMessageTypes().get(46); + internal_static_hbase_pb_IsInMaintenanceModeRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_IsInMaintenanceModeRequest_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_IsInMaintenanceModeResponse_descriptor = + getDescriptor().getMessageTypes().get(47); + internal_static_hbase_pb_IsInMaintenanceModeResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_IsInMaintenanceModeResponse_descriptor, + new java.lang.String[] { "InMaintenanceMode", 
}); + internal_static_hbase_pb_BalanceRequest_descriptor = + getDescriptor().getMessageTypes().get(48); + internal_static_hbase_pb_BalanceRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_BalanceRequest_descriptor, + new java.lang.String[] { "Force", }); + internal_static_hbase_pb_BalanceResponse_descriptor = + getDescriptor().getMessageTypes().get(49); + internal_static_hbase_pb_BalanceResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_BalanceResponse_descriptor, + new java.lang.String[] { "BalancerRan", }); + internal_static_hbase_pb_SetBalancerRunningRequest_descriptor = + getDescriptor().getMessageTypes().get(50); + internal_static_hbase_pb_SetBalancerRunningRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_SetBalancerRunningRequest_descriptor, + new java.lang.String[] { "On", "Synchronous", }); + internal_static_hbase_pb_SetBalancerRunningResponse_descriptor = + getDescriptor().getMessageTypes().get(51); + internal_static_hbase_pb_SetBalancerRunningResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_SetBalancerRunningResponse_descriptor, + new java.lang.String[] { "PrevBalanceValue", }); + internal_static_hbase_pb_IsBalancerEnabledRequest_descriptor = + getDescriptor().getMessageTypes().get(52); + internal_static_hbase_pb_IsBalancerEnabledRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_IsBalancerEnabledRequest_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_IsBalancerEnabledResponse_descriptor = + getDescriptor().getMessageTypes().get(53); + internal_static_hbase_pb_IsBalancerEnabledResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + 
internal_static_hbase_pb_IsBalancerEnabledResponse_descriptor, + new java.lang.String[] { "Enabled", }); + internal_static_hbase_pb_SetSplitOrMergeEnabledRequest_descriptor = + getDescriptor().getMessageTypes().get(54); + internal_static_hbase_pb_SetSplitOrMergeEnabledRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_SetSplitOrMergeEnabledRequest_descriptor, + new java.lang.String[] { "Enabled", "Synchronous", "SwitchTypes", }); + internal_static_hbase_pb_SetSplitOrMergeEnabledResponse_descriptor = + getDescriptor().getMessageTypes().get(55); + internal_static_hbase_pb_SetSplitOrMergeEnabledResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_SetSplitOrMergeEnabledResponse_descriptor, + new java.lang.String[] { "PrevValue", }); + internal_static_hbase_pb_IsSplitOrMergeEnabledRequest_descriptor = + getDescriptor().getMessageTypes().get(56); + internal_static_hbase_pb_IsSplitOrMergeEnabledRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_IsSplitOrMergeEnabledRequest_descriptor, + new java.lang.String[] { "SwitchType", }); + internal_static_hbase_pb_IsSplitOrMergeEnabledResponse_descriptor = + getDescriptor().getMessageTypes().get(57); + internal_static_hbase_pb_IsSplitOrMergeEnabledResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_IsSplitOrMergeEnabledResponse_descriptor, + new java.lang.String[] { "Enabled", }); + internal_static_hbase_pb_NormalizeRequest_descriptor = + getDescriptor().getMessageTypes().get(58); + internal_static_hbase_pb_NormalizeRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_NormalizeRequest_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_NormalizeResponse_descriptor = + 
getDescriptor().getMessageTypes().get(59); + internal_static_hbase_pb_NormalizeResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_NormalizeResponse_descriptor, + new java.lang.String[] { "NormalizerRan", }); + internal_static_hbase_pb_SetNormalizerRunningRequest_descriptor = + getDescriptor().getMessageTypes().get(60); + internal_static_hbase_pb_SetNormalizerRunningRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_SetNormalizerRunningRequest_descriptor, + new java.lang.String[] { "On", }); + internal_static_hbase_pb_SetNormalizerRunningResponse_descriptor = + getDescriptor().getMessageTypes().get(61); + internal_static_hbase_pb_SetNormalizerRunningResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_SetNormalizerRunningResponse_descriptor, + new java.lang.String[] { "PrevNormalizerValue", }); + internal_static_hbase_pb_IsNormalizerEnabledRequest_descriptor = + getDescriptor().getMessageTypes().get(62); + internal_static_hbase_pb_IsNormalizerEnabledRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_IsNormalizerEnabledRequest_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_IsNormalizerEnabledResponse_descriptor = + getDescriptor().getMessageTypes().get(63); + internal_static_hbase_pb_IsNormalizerEnabledResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_IsNormalizerEnabledResponse_descriptor, + new java.lang.String[] { "Enabled", }); + internal_static_hbase_pb_RunCatalogScanRequest_descriptor = + getDescriptor().getMessageTypes().get(64); + internal_static_hbase_pb_RunCatalogScanRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + 
internal_static_hbase_pb_RunCatalogScanRequest_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_RunCatalogScanResponse_descriptor = + getDescriptor().getMessageTypes().get(65); + internal_static_hbase_pb_RunCatalogScanResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_RunCatalogScanResponse_descriptor, + new java.lang.String[] { "ScanResult", }); + internal_static_hbase_pb_EnableCatalogJanitorRequest_descriptor = + getDescriptor().getMessageTypes().get(66); + internal_static_hbase_pb_EnableCatalogJanitorRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_EnableCatalogJanitorRequest_descriptor, + new java.lang.String[] { "Enable", }); + internal_static_hbase_pb_EnableCatalogJanitorResponse_descriptor = + getDescriptor().getMessageTypes().get(67); + internal_static_hbase_pb_EnableCatalogJanitorResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_EnableCatalogJanitorResponse_descriptor, + new java.lang.String[] { "PrevValue", }); + internal_static_hbase_pb_IsCatalogJanitorEnabledRequest_descriptor = + getDescriptor().getMessageTypes().get(68); + internal_static_hbase_pb_IsCatalogJanitorEnabledRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_IsCatalogJanitorEnabledRequest_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_IsCatalogJanitorEnabledResponse_descriptor = + getDescriptor().getMessageTypes().get(69); + internal_static_hbase_pb_IsCatalogJanitorEnabledResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_IsCatalogJanitorEnabledResponse_descriptor, + new java.lang.String[] { "Value", }); + internal_static_hbase_pb_SnapshotRequest_descriptor = + 
getDescriptor().getMessageTypes().get(70); + internal_static_hbase_pb_SnapshotRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_SnapshotRequest_descriptor, + new java.lang.String[] { "Snapshot", }); + internal_static_hbase_pb_SnapshotResponse_descriptor = + getDescriptor().getMessageTypes().get(71); + internal_static_hbase_pb_SnapshotResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_SnapshotResponse_descriptor, + new java.lang.String[] { "ExpectedTimeout", }); + internal_static_hbase_pb_GetCompletedSnapshotsRequest_descriptor = + getDescriptor().getMessageTypes().get(72); + internal_static_hbase_pb_GetCompletedSnapshotsRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetCompletedSnapshotsRequest_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_GetCompletedSnapshotsResponse_descriptor = + getDescriptor().getMessageTypes().get(73); + internal_static_hbase_pb_GetCompletedSnapshotsResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetCompletedSnapshotsResponse_descriptor, + new java.lang.String[] { "Snapshots", }); + internal_static_hbase_pb_DeleteSnapshotRequest_descriptor = + getDescriptor().getMessageTypes().get(74); + internal_static_hbase_pb_DeleteSnapshotRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_DeleteSnapshotRequest_descriptor, + new java.lang.String[] { "Snapshot", }); + internal_static_hbase_pb_DeleteSnapshotResponse_descriptor = + getDescriptor().getMessageTypes().get(75); + internal_static_hbase_pb_DeleteSnapshotResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_DeleteSnapshotResponse_descriptor, + new 
java.lang.String[] { }); + internal_static_hbase_pb_RestoreSnapshotRequest_descriptor = + getDescriptor().getMessageTypes().get(76); + internal_static_hbase_pb_RestoreSnapshotRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_RestoreSnapshotRequest_descriptor, + new java.lang.String[] { "Snapshot", "NonceGroup", "Nonce", }); + internal_static_hbase_pb_RestoreSnapshotResponse_descriptor = + getDescriptor().getMessageTypes().get(77); + internal_static_hbase_pb_RestoreSnapshotResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_RestoreSnapshotResponse_descriptor, + new java.lang.String[] { "ProcId", }); + internal_static_hbase_pb_IsSnapshotDoneRequest_descriptor = + getDescriptor().getMessageTypes().get(78); + internal_static_hbase_pb_IsSnapshotDoneRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_IsSnapshotDoneRequest_descriptor, + new java.lang.String[] { "Snapshot", }); + internal_static_hbase_pb_IsSnapshotDoneResponse_descriptor = + getDescriptor().getMessageTypes().get(79); + internal_static_hbase_pb_IsSnapshotDoneResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_IsSnapshotDoneResponse_descriptor, + new java.lang.String[] { "Done", "Snapshot", }); + internal_static_hbase_pb_IsRestoreSnapshotDoneRequest_descriptor = + getDescriptor().getMessageTypes().get(80); + internal_static_hbase_pb_IsRestoreSnapshotDoneRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_IsRestoreSnapshotDoneRequest_descriptor, + new java.lang.String[] { "Snapshot", }); + internal_static_hbase_pb_IsRestoreSnapshotDoneResponse_descriptor = + getDescriptor().getMessageTypes().get(81); + 
internal_static_hbase_pb_IsRestoreSnapshotDoneResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_IsRestoreSnapshotDoneResponse_descriptor, + new java.lang.String[] { "Done", }); + internal_static_hbase_pb_GetSchemaAlterStatusRequest_descriptor = + getDescriptor().getMessageTypes().get(82); + internal_static_hbase_pb_GetSchemaAlterStatusRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetSchemaAlterStatusRequest_descriptor, + new java.lang.String[] { "TableName", }); + internal_static_hbase_pb_GetSchemaAlterStatusResponse_descriptor = + getDescriptor().getMessageTypes().get(83); + internal_static_hbase_pb_GetSchemaAlterStatusResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetSchemaAlterStatusResponse_descriptor, + new java.lang.String[] { "YetToUpdateRegions", "TotalRegions", }); + internal_static_hbase_pb_GetTableDescriptorsRequest_descriptor = + getDescriptor().getMessageTypes().get(84); + internal_static_hbase_pb_GetTableDescriptorsRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetTableDescriptorsRequest_descriptor, + new java.lang.String[] { "TableNames", "Regex", "IncludeSysTables", "Namespace", }); + internal_static_hbase_pb_GetTableDescriptorsResponse_descriptor = + getDescriptor().getMessageTypes().get(85); + internal_static_hbase_pb_GetTableDescriptorsResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetTableDescriptorsResponse_descriptor, + new java.lang.String[] { "TableSchema", }); + internal_static_hbase_pb_GetTableNamesRequest_descriptor = + getDescriptor().getMessageTypes().get(86); + internal_static_hbase_pb_GetTableNamesRequest_fieldAccessorTable = new + 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetTableNamesRequest_descriptor, + new java.lang.String[] { "Regex", "IncludeSysTables", "Namespace", }); + internal_static_hbase_pb_GetTableNamesResponse_descriptor = + getDescriptor().getMessageTypes().get(87); + internal_static_hbase_pb_GetTableNamesResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetTableNamesResponse_descriptor, + new java.lang.String[] { "TableNames", }); + internal_static_hbase_pb_GetTableStateRequest_descriptor = + getDescriptor().getMessageTypes().get(88); + internal_static_hbase_pb_GetTableStateRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetTableStateRequest_descriptor, + new java.lang.String[] { "TableName", }); + internal_static_hbase_pb_GetTableStateResponse_descriptor = + getDescriptor().getMessageTypes().get(89); + internal_static_hbase_pb_GetTableStateResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetTableStateResponse_descriptor, + new java.lang.String[] { "TableState", }); + internal_static_hbase_pb_GetClusterStatusRequest_descriptor = + getDescriptor().getMessageTypes().get(90); + internal_static_hbase_pb_GetClusterStatusRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetClusterStatusRequest_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_GetClusterStatusResponse_descriptor = + getDescriptor().getMessageTypes().get(91); + internal_static_hbase_pb_GetClusterStatusResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetClusterStatusResponse_descriptor, + new java.lang.String[] { "ClusterStatus", }); + internal_static_hbase_pb_IsMasterRunningRequest_descriptor = + 
getDescriptor().getMessageTypes().get(92); + internal_static_hbase_pb_IsMasterRunningRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_IsMasterRunningRequest_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_IsMasterRunningResponse_descriptor = + getDescriptor().getMessageTypes().get(93); + internal_static_hbase_pb_IsMasterRunningResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_IsMasterRunningResponse_descriptor, + new java.lang.String[] { "IsMasterRunning", }); + internal_static_hbase_pb_ExecProcedureRequest_descriptor = + getDescriptor().getMessageTypes().get(94); + internal_static_hbase_pb_ExecProcedureRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ExecProcedureRequest_descriptor, + new java.lang.String[] { "Procedure", }); + internal_static_hbase_pb_ExecProcedureResponse_descriptor = + getDescriptor().getMessageTypes().get(95); + internal_static_hbase_pb_ExecProcedureResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ExecProcedureResponse_descriptor, + new java.lang.String[] { "ExpectedTimeout", "ReturnData", }); + internal_static_hbase_pb_IsProcedureDoneRequest_descriptor = + getDescriptor().getMessageTypes().get(96); + internal_static_hbase_pb_IsProcedureDoneRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_IsProcedureDoneRequest_descriptor, + new java.lang.String[] { "Procedure", }); + internal_static_hbase_pb_IsProcedureDoneResponse_descriptor = + getDescriptor().getMessageTypes().get(97); + internal_static_hbase_pb_IsProcedureDoneResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + 
internal_static_hbase_pb_IsProcedureDoneResponse_descriptor, + new java.lang.String[] { "Done", "Snapshot", }); + internal_static_hbase_pb_GetProcedureResultRequest_descriptor = + getDescriptor().getMessageTypes().get(98); + internal_static_hbase_pb_GetProcedureResultRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetProcedureResultRequest_descriptor, + new java.lang.String[] { "ProcId", }); + internal_static_hbase_pb_GetProcedureResultResponse_descriptor = + getDescriptor().getMessageTypes().get(99); + internal_static_hbase_pb_GetProcedureResultResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetProcedureResultResponse_descriptor, + new java.lang.String[] { "State", "StartTime", "LastUpdate", "Result", "Exception", }); + internal_static_hbase_pb_AbortProcedureRequest_descriptor = + getDescriptor().getMessageTypes().get(100); + internal_static_hbase_pb_AbortProcedureRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_AbortProcedureRequest_descriptor, + new java.lang.String[] { "ProcId", "MayInterruptIfRunning", }); + internal_static_hbase_pb_AbortProcedureResponse_descriptor = + getDescriptor().getMessageTypes().get(101); + internal_static_hbase_pb_AbortProcedureResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_AbortProcedureResponse_descriptor, + new java.lang.String[] { "IsProcedureAborted", }); + internal_static_hbase_pb_ListProceduresRequest_descriptor = + getDescriptor().getMessageTypes().get(102); + internal_static_hbase_pb_ListProceduresRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ListProceduresRequest_descriptor, + new java.lang.String[] { }); + 
internal_static_hbase_pb_ListProceduresResponse_descriptor = + getDescriptor().getMessageTypes().get(103); + internal_static_hbase_pb_ListProceduresResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ListProceduresResponse_descriptor, + new java.lang.String[] { "Procedure", }); + internal_static_hbase_pb_SetQuotaRequest_descriptor = + getDescriptor().getMessageTypes().get(104); + internal_static_hbase_pb_SetQuotaRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_SetQuotaRequest_descriptor, + new java.lang.String[] { "UserName", "UserGroup", "Namespace", "TableName", "RemoveAll", "BypassGlobals", "Throttle", }); + internal_static_hbase_pb_SetQuotaResponse_descriptor = + getDescriptor().getMessageTypes().get(105); + internal_static_hbase_pb_SetQuotaResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_SetQuotaResponse_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_MajorCompactionTimestampRequest_descriptor = + getDescriptor().getMessageTypes().get(106); + internal_static_hbase_pb_MajorCompactionTimestampRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_MajorCompactionTimestampRequest_descriptor, + new java.lang.String[] { "TableName", }); + internal_static_hbase_pb_MajorCompactionTimestampForRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(107); + internal_static_hbase_pb_MajorCompactionTimestampForRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_MajorCompactionTimestampForRegionRequest_descriptor, + new java.lang.String[] { "Region", }); + internal_static_hbase_pb_MajorCompactionTimestampResponse_descriptor = + getDescriptor().getMessageTypes().get(108); + 
internal_static_hbase_pb_MajorCompactionTimestampResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_MajorCompactionTimestampResponse_descriptor, + new java.lang.String[] { "CompactionTimestamp", }); + internal_static_hbase_pb_SecurityCapabilitiesRequest_descriptor = + getDescriptor().getMessageTypes().get(109); + internal_static_hbase_pb_SecurityCapabilitiesRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_SecurityCapabilitiesRequest_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_SecurityCapabilitiesResponse_descriptor = + getDescriptor().getMessageTypes().get(110); + internal_static_hbase_pb_SecurityCapabilitiesResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_SecurityCapabilitiesResponse_descriptor, + new java.lang.String[] { "Capabilities", }); + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(); + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.getDescriptor(); + org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.getDescriptor(); + org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.getDescriptor(); + org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.getDescriptor(); + org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ProcedureProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ProcedureProtos.java index 58a6cf5..40e4611 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ProcedureProtos.java +++ 
b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ProcedureProtos.java @@ -6,7 +6,13 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated; public final class ProcedureProtos { private ProcedureProtos() {} public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); } /** * Protobuf enum {@code hbase.pb.ProcedureState} @@ -14,108 +20,118 @@ public final class ProcedureProtos { public enum ProcedureState implements com.google.protobuf.ProtocolMessageEnum { /** - * INITIALIZING = 1; - * *
      * Procedure in construction, not yet added to the executor
      * 
+ * + * INITIALIZING = 1; */ - INITIALIZING(0, 1), + INITIALIZING(1), /** - * RUNNABLE = 2; - * *
      * Procedure added to the executor, and ready to be executed
      * 
+ * + * RUNNABLE = 2; */ - RUNNABLE(1, 2), + RUNNABLE(2), /** - * WAITING = 3; - * *
      * The procedure is waiting on children to be completed
      * 
+ * + * WAITING = 3; */ - WAITING(2, 3), + WAITING(3), /** - * WAITING_TIMEOUT = 4; - * *
      * The procedure is waiting a timout or an external event
      * 
+ * + * WAITING_TIMEOUT = 4; */ - WAITING_TIMEOUT(3, 4), + WAITING_TIMEOUT(4), /** - * ROLLEDBACK = 5; - * *
      * The procedure failed and was rolledback
      * 
+ * + * ROLLEDBACK = 5; */ - ROLLEDBACK(4, 5), + ROLLEDBACK(5), /** - * FINISHED = 6; - * *
      * The procedure execution is completed. may need a rollback if failed.
      * 
+ * + * FINISHED = 6; */ - FINISHED(5, 6), + FINISHED(6), ; /** - * INITIALIZING = 1; - * *
      * Procedure in construction, not yet added to the executor
      * 
+ * + * INITIALIZING = 1; */ public static final int INITIALIZING_VALUE = 1; /** - * RUNNABLE = 2; - * *
      * Procedure added to the executor, and ready to be executed
      * 
+ * + * RUNNABLE = 2; */ public static final int RUNNABLE_VALUE = 2; /** - * WAITING = 3; - * *
      * The procedure is waiting on children to be completed
      * 
+ * + * WAITING = 3; */ public static final int WAITING_VALUE = 3; /** - * WAITING_TIMEOUT = 4; - * *
      * The procedure is waiting a timout or an external event
      * 
+ * + * WAITING_TIMEOUT = 4; */ public static final int WAITING_TIMEOUT_VALUE = 4; /** - * ROLLEDBACK = 5; - * *
      * The procedure failed and was rolledback
      * 
+ * + * ROLLEDBACK = 5; */ public static final int ROLLEDBACK_VALUE = 5; /** - * FINISHED = 6; - * *
      * The procedure execution is completed. may need a rollback if failed.
      * 
+ * + * FINISHED = 6; */ public static final int FINISHED_VALUE = 6; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static ProcedureState valueOf(int value) { + return forNumber(value); + } + + public static ProcedureState forNumber(int value) { switch (value) { case 1: return INITIALIZING; case 2: return RUNNABLE; @@ -131,17 +147,17 @@ public final class ProcedureProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + ProcedureState> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public ProcedureState findValueByNumber(int number) { - return ProcedureState.valueOf(number); + return ProcedureState.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -163,66 +179,62 @@ public final class ProcedureProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private ProcedureState(int index, int value) { - this.index = index; + private ProcedureState(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.ProcedureState) } - public interface ProcedureOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ProcedureOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.Procedure) + com.google.protobuf.MessageOrBuilder { - // required string class_name = 1; /** - * required string class_name = 1; - * *
      * internal "static" state
      * 
+ * + * required string class_name = 1; */ boolean hasClassName(); /** - * required string class_name = 1; - * *
      * internal "static" state
      * 
+ * + * required string class_name = 1; */ java.lang.String getClassName(); /** - * required string class_name = 1; - * *
      * internal "static" state
      * 
+ * + * required string class_name = 1; */ com.google.protobuf.ByteString getClassNameBytes(); - // optional uint64 parent_id = 2; /** - * optional uint64 parent_id = 2; - * *
      * parent if not a root-procedure otherwise not set
      * 
+ * + * optional uint64 parent_id = 2; */ boolean hasParentId(); /** - * optional uint64 parent_id = 2; - * *
      * parent if not a root-procedure otherwise not set
      * 
+ * + * optional uint64 parent_id = 2; */ long getParentId(); - // required uint64 proc_id = 3; /** * required uint64 proc_id = 3; */ @@ -232,7 +244,6 @@ public final class ProcedureProtos { */ long getProcId(); - // required uint64 start_time = 4; /** * required uint64 start_time = 4; */ @@ -242,7 +253,6 @@ public final class ProcedureProtos { */ long getStartTime(); - // optional string owner = 5; /** * optional string owner = 5; */ @@ -257,51 +267,48 @@ public final class ProcedureProtos { com.google.protobuf.ByteString getOwnerBytes(); - // required .hbase.pb.ProcedureState state = 6; /** - * required .hbase.pb.ProcedureState state = 6; - * *
      * internal "runtime" state
      * 
+ * + * required .hbase.pb.ProcedureState state = 6; */ boolean hasState(); /** - * required .hbase.pb.ProcedureState state = 6; - * *
      * internal "runtime" state
      * 
+ * + * required .hbase.pb.ProcedureState state = 6; */ org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState getState(); - // repeated uint32 stack_id = 7; /** - * repeated uint32 stack_id = 7; - * *
      * stack indices in case the procedure was running
      * 
+ * + * repeated uint32 stack_id = 7; */ java.util.List getStackIdList(); /** - * repeated uint32 stack_id = 7; - * *
      * stack indices in case the procedure was running
      * 
+ * + * repeated uint32 stack_id = 7; */ int getStackIdCount(); /** - * repeated uint32 stack_id = 7; - * *
      * stack indices in case the procedure was running
      * 
+ * + * repeated uint32 stack_id = 7; */ int getStackId(int index); - // required uint64 last_update = 8; /** * required uint64 last_update = 8; */ @@ -311,7 +318,6 @@ public final class ProcedureProtos { */ long getLastUpdate(); - // optional uint32 timeout = 9; /** * optional uint32 timeout = 9; */ @@ -321,87 +327,82 @@ public final class ProcedureProtos { */ int getTimeout(); - // optional .hbase.pb.ForeignExceptionMessage exception = 10; /** - * optional .hbase.pb.ForeignExceptionMessage exception = 10; - * *
      * user state/results
      * 
+ * + * optional .hbase.pb.ForeignExceptionMessage exception = 10; */ boolean hasException(); /** - * optional .hbase.pb.ForeignExceptionMessage exception = 10; - * *
      * user state/results
      * 
+ * + * optional .hbase.pb.ForeignExceptionMessage exception = 10; */ org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage getException(); /** - * optional .hbase.pb.ForeignExceptionMessage exception = 10; - * *
      * user state/results
      * 
+ * + * optional .hbase.pb.ForeignExceptionMessage exception = 10; */ org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder getExceptionOrBuilder(); - // optional bytes result = 11; /** - * optional bytes result = 11; - * *
      * opaque (user) result structure
      * 
+ * + * optional bytes result = 11; */ boolean hasResult(); /** - * optional bytes result = 11; - * *
      * opaque (user) result structure
      * 
+ * + * optional bytes result = 11; */ com.google.protobuf.ByteString getResult(); - // optional bytes state_data = 12; /** - * optional bytes state_data = 12; - * *
      * opaque (user) procedure internal-state
      * 
+ * + * optional bytes state_data = 12; */ boolean hasStateData(); /** - * optional bytes state_data = 12; - * *
      * opaque (user) procedure internal-state
      * 
+ * + * optional bytes state_data = 12; */ com.google.protobuf.ByteString getStateData(); - // optional uint64 nonce_group = 13 [default = 0]; /** - * optional uint64 nonce_group = 13 [default = 0]; - * *
      * Nonce to prevent same procedure submit by multiple times
      * 
+ * + * optional uint64 nonce_group = 13 [default = 0]; */ boolean hasNonceGroup(); /** - * optional uint64 nonce_group = 13 [default = 0]; - * *
      * Nonce to prevent same procedure submit by multiple times
      * 
+ * + * optional uint64 nonce_group = 13 [default = 0]; */ long getNonceGroup(); - // optional uint64 nonce = 14 [default = 0]; /** * optional uint64 nonce = 14 [default = 0]; */ @@ -412,43 +413,47 @@ public final class ProcedureProtos { long getNonce(); } /** - * Protobuf type {@code hbase.pb.Procedure} - * *
    **
    * Procedure metadata, serialized by the ProcedureStore to be able to recover the old state.
    * 
+ * + * Protobuf type {@code hbase.pb.Procedure} */ - public static final class Procedure extends - com.google.protobuf.GeneratedMessage - implements ProcedureOrBuilder { + public static final class Procedure extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.Procedure) + ProcedureOrBuilder { // Use Procedure.newBuilder() to construct. - private Procedure(com.google.protobuf.GeneratedMessage.Builder builder) { + private Procedure(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private Procedure(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final Procedure defaultInstance; - public static Procedure getDefaultInstance() { - return defaultInstance; } - - public Procedure getDefaultInstanceForType() { - return defaultInstance; + private Procedure() { + className_ = ""; + parentId_ = 0L; + procId_ = 0L; + startTime_ = 0L; + owner_ = ""; + state_ = 1; + stackId_ = java.util.Collections.emptyList(); + lastUpdate_ = 0L; + timeout_ = 0; + result_ = com.google.protobuf.ByteString.EMPTY; + stateData_ = com.google.protobuf.ByteString.EMPTY; + nonceGroup_ = 0L; + nonce_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private Procedure( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -468,8 +473,9 @@ public final class ProcedureProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); 
bitField0_ |= 0x00000001; - className_ = input.readBytes(); + className_ = bs; break; } case 16: { @@ -488,8 +494,9 @@ public final class ProcedureProtos { break; } case 42: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000010; - owner_ = input.readBytes(); + owner_ = bs; break; } case 48: { @@ -499,7 +506,7 @@ public final class ProcedureProtos { unknownFields.mergeVarintField(6, rawValue); } else { bitField0_ |= 0x00000020; - state_ = value; + state_ = rawValue; } break; } @@ -573,7 +580,7 @@ public final class ProcedureProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) { stackId_ = java.util.Collections.unmodifiableList(stackId_); @@ -587,48 +594,32 @@ public final class ProcedureProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_Procedure_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_Procedure_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public Procedure parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new Procedure(input, extensionRegistry); - } - }; - - @java.lang.Override - public 
com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string class_name = 1; public static final int CLASS_NAME_FIELD_NUMBER = 1; - private java.lang.Object className_; + private volatile java.lang.Object className_; /** - * required string class_name = 1; - * *
      * internal "static" state
      * 
+ * + * required string class_name = 1; */ public boolean hasClassName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required string class_name = 1; - * *
      * internal "static" state
      * 
+ * + * required string class_name = 1; */ public java.lang.String getClassName() { java.lang.Object ref = className_; @@ -645,11 +636,11 @@ public final class ProcedureProtos { } } /** - * required string class_name = 1; - * *
      * internal "static" state
      * 
+ * + * required string class_name = 1; */ public com.google.protobuf.ByteString getClassNameBytes() { @@ -665,31 +656,29 @@ public final class ProcedureProtos { } } - // optional uint64 parent_id = 2; public static final int PARENT_ID_FIELD_NUMBER = 2; private long parentId_; /** - * optional uint64 parent_id = 2; - * *
      * parent if not a root-procedure otherwise not set
      * 
+ * + * optional uint64 parent_id = 2; */ public boolean hasParentId() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional uint64 parent_id = 2; - * *
      * parent if not a root-procedure otherwise not set
      * 
+ * + * optional uint64 parent_id = 2; */ public long getParentId() { return parentId_; } - // required uint64 proc_id = 3; public static final int PROC_ID_FIELD_NUMBER = 3; private long procId_; /** @@ -705,7 +694,6 @@ public final class ProcedureProtos { return procId_; } - // required uint64 start_time = 4; public static final int START_TIME_FIELD_NUMBER = 4; private long startTime_; /** @@ -721,9 +709,8 @@ public final class ProcedureProtos { return startTime_; } - // optional string owner = 5; public static final int OWNER_FIELD_NUMBER = 5; - private java.lang.Object owner_; + private volatile java.lang.Object owner_; /** * optional string owner = 5; */ @@ -764,66 +751,64 @@ public final class ProcedureProtos { } } - // required .hbase.pb.ProcedureState state = 6; public static final int STATE_FIELD_NUMBER = 6; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState state_; + private int state_; /** - * required .hbase.pb.ProcedureState state = 6; - * *
      * internal "runtime" state
      * 
+ * + * required .hbase.pb.ProcedureState state = 6; */ public boolean hasState() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** - * required .hbase.pb.ProcedureState state = 6; - * *
      * internal "runtime" state
      * 
+ * + * required .hbase.pb.ProcedureState state = 6; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState getState() { - return state_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState result = org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState.valueOf(state_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState.INITIALIZING : result; } - // repeated uint32 stack_id = 7; public static final int STACK_ID_FIELD_NUMBER = 7; private java.util.List stackId_; /** - * repeated uint32 stack_id = 7; - * *
      * stack indices in case the procedure was running
      * 
+ * + * repeated uint32 stack_id = 7; */ public java.util.List getStackIdList() { return stackId_; } /** - * repeated uint32 stack_id = 7; - * *
      * stack indices in case the procedure was running
      * 
+ * + * repeated uint32 stack_id = 7; */ public int getStackIdCount() { return stackId_.size(); } /** - * repeated uint32 stack_id = 7; - * *
      * stack indices in case the procedure was running
      * 
+ * + * repeated uint32 stack_id = 7; */ public int getStackId(int index) { return stackId_.get(index); } - // required uint64 last_update = 8; public static final int LAST_UPDATE_FIELD_NUMBER = 8; private long lastUpdate_; /** @@ -839,7 +824,6 @@ public final class ProcedureProtos { return lastUpdate_; } - // optional uint32 timeout = 9; public static final int TIMEOUT_FIELD_NUMBER = 9; private int timeout_; /** @@ -855,113 +839,108 @@ public final class ProcedureProtos { return timeout_; } - // optional .hbase.pb.ForeignExceptionMessage exception = 10; public static final int EXCEPTION_FIELD_NUMBER = 10; private org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage exception_; /** - * optional .hbase.pb.ForeignExceptionMessage exception = 10; - * *
      * user state/results
      * 
+ * + * optional .hbase.pb.ForeignExceptionMessage exception = 10; */ public boolean hasException() { return ((bitField0_ & 0x00000100) == 0x00000100); } /** - * optional .hbase.pb.ForeignExceptionMessage exception = 10; - * *
      * user state/results
      * 
+ * + * optional .hbase.pb.ForeignExceptionMessage exception = 10; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage getException() { - return exception_; + return exception_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance() : exception_; } /** - * optional .hbase.pb.ForeignExceptionMessage exception = 10; - * *
      * user state/results
      * 
+ * + * optional .hbase.pb.ForeignExceptionMessage exception = 10; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder getExceptionOrBuilder() { - return exception_; + return exception_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance() : exception_; } - // optional bytes result = 11; public static final int RESULT_FIELD_NUMBER = 11; private com.google.protobuf.ByteString result_; /** - * optional bytes result = 11; - * *
      * opaque (user) result structure
      * 
+ * + * optional bytes result = 11; */ public boolean hasResult() { return ((bitField0_ & 0x00000200) == 0x00000200); } /** - * optional bytes result = 11; - * *
      * opaque (user) result structure
      * 
+ * + * optional bytes result = 11; */ public com.google.protobuf.ByteString getResult() { return result_; } - // optional bytes state_data = 12; public static final int STATE_DATA_FIELD_NUMBER = 12; private com.google.protobuf.ByteString stateData_; /** - * optional bytes state_data = 12; - * *
      * opaque (user) procedure internal-state
      * 
+ * + * optional bytes state_data = 12; */ public boolean hasStateData() { return ((bitField0_ & 0x00000400) == 0x00000400); } /** - * optional bytes state_data = 12; - * *
      * opaque (user) procedure internal-state
      * 
+ * + * optional bytes state_data = 12; */ public com.google.protobuf.ByteString getStateData() { return stateData_; } - // optional uint64 nonce_group = 13 [default = 0]; public static final int NONCE_GROUP_FIELD_NUMBER = 13; private long nonceGroup_; /** - * optional uint64 nonce_group = 13 [default = 0]; - * *
      * Nonce to prevent same procedure submit by multiple times
      * 
+ * + * optional uint64 nonce_group = 13 [default = 0]; */ public boolean hasNonceGroup() { return ((bitField0_ & 0x00000800) == 0x00000800); } /** - * optional uint64 nonce_group = 13 [default = 0]; - * *
      * Nonce to prevent same procedure submit by multiple times
      * 
+ * + * optional uint64 nonce_group = 13 [default = 0]; */ public long getNonceGroup() { return nonceGroup_; } - // optional uint64 nonce = 14 [default = 0]; public static final int NONCE_FIELD_NUMBER = 14; private long nonce_; /** @@ -977,26 +956,11 @@ public final class ProcedureProtos { return nonce_; } - private void initFields() { - className_ = ""; - parentId_ = 0L; - procId_ = 0L; - startTime_ = 0L; - owner_ = ""; - state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState.INITIALIZING; - stackId_ = java.util.Collections.emptyList(); - lastUpdate_ = 0L; - timeout_ = 0; - exception_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance(); - result_ = com.google.protobuf.ByteString.EMPTY; - stateData_ = com.google.protobuf.ByteString.EMPTY; - nonceGroup_ = 0L; - nonce_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasClassName()) { memoizedIsInitialized = 0; @@ -1024,9 +988,8 @@ public final class ProcedureProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getClassNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, className_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt64(2, parentId_); @@ -1038,10 +1001,10 @@ public final class ProcedureProtos { output.writeUInt64(4, startTime_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeBytes(5, getOwnerBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, owner_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { - output.writeEnum(6, state_.getNumber()); + 
output.writeEnum(6, state_); } for (int i = 0; i < stackId_.size(); i++) { output.writeUInt32(7, stackId_.get(i)); @@ -1053,7 +1016,7 @@ public final class ProcedureProtos { output.writeUInt32(9, timeout_); } if (((bitField0_ & 0x00000100) == 0x00000100)) { - output.writeMessage(10, exception_); + output.writeMessage(10, getException()); } if (((bitField0_ & 0x00000200) == 0x00000200)) { output.writeBytes(11, result_); @@ -1067,18 +1030,16 @@ public final class ProcedureProtos { if (((bitField0_ & 0x00001000) == 0x00001000)) { output.writeUInt64(14, nonce_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getClassNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, className_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream @@ -1093,12 +1054,11 @@ public final class ProcedureProtos { .computeUInt64Size(4, startTime_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(5, getOwnerBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, owner_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(6, state_.getNumber()); + .computeEnumSize(6, state_); } { int dataSize = 0; @@ -1119,7 +1079,7 @@ public final class ProcedureProtos { } if (((bitField0_ & 0x00000100) == 0x00000100)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(10, exception_); + .computeMessageSize(10, getException()); } if (((bitField0_ & 0x00000200) == 0x00000200)) { size += com.google.protobuf.CodedOutputStream @@ -1137,19 
+1097,13 @@ public final class ProcedureProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(14, nonce_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -1187,8 +1141,7 @@ public final class ProcedureProtos { } result = result && (hasState() == other.hasState()); if (hasState()) { - result = result && - (getState() == other.getState()); + result = result && state_ == other.state_; } result = result && getStackIdList() .equals(other.getStackIdList()); @@ -1227,12 +1180,10 @@ public final class ProcedureProtos { result = result && (getNonce() == other.getNonce()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -1246,15 +1197,18 @@ public final class ProcedureProtos { } if (hasParentId()) { hash = (37 * hash) + PARENT_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getParentId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getParentId()); } if (hasProcId()) { hash = (37 * hash) + PROC_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getProcId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getProcId()); } if (hasStartTime()) { hash = (37 * hash) + START_TIME_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getStartTime()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getStartTime()); } if (hasOwner()) { hash = (37 * hash) + OWNER_FIELD_NUMBER; @@ -1262,7 
+1216,7 @@ public final class ProcedureProtos { } if (hasState()) { hash = (37 * hash) + STATE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getState()); + hash = (53 * hash) + state_; } if (getStackIdCount() > 0) { hash = (37 * hash) + STACK_ID_FIELD_NUMBER; @@ -1270,7 +1224,8 @@ public final class ProcedureProtos { } if (hasLastUpdate()) { hash = (37 * hash) + LAST_UPDATE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLastUpdate()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getLastUpdate()); } if (hasTimeout()) { hash = (37 * hash) + TIMEOUT_FIELD_NUMBER; @@ -1290,13 +1245,15 @@ public final class ProcedureProtos { } if (hasNonceGroup()) { hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonceGroup()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonceGroup()); } if (hasNonce()) { hash = (37 * hash) + NONCE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonce()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonce()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -1324,66 +1281,78 @@ public final class ProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure 
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.Procedure} - * *
      **
      * Procedure metadata, serialized by the ProcedureStore to be able to recover the old state.
      * 
+ * + * Protobuf type {@code hbase.pb.Procedure} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.Procedure) + org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_Procedure_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_Procedure_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -1396,19 +1365,16 @@ public final class ProcedureProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getExceptionFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); className_ = ""; @@ -1421,7 +1387,7 @@ public final class ProcedureProtos { bitField0_ = (bitField0_ & ~0x00000008); owner_ = ""; bitField0_ = (bitField0_ & ~0x00000010); - state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState.INITIALIZING; + state_ = 1; bitField0_ = (bitField0_ & ~0x00000020); stackId_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & 
~0x00000040); @@ -1430,7 +1396,7 @@ public final class ProcedureProtos { timeout_ = 0; bitField0_ = (bitField0_ & ~0x00000100); if (exceptionBuilder_ == null) { - exception_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance(); + exception_ = null; } else { exceptionBuilder_.clear(); } @@ -1446,10 +1412,6 @@ public final class ProcedureProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_Procedure_descriptor; @@ -1537,6 +1499,32 @@ public final class ProcedureProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure)other); @@ -1601,29 +1589,25 @@ public final class ProcedureProtos { if (other.hasNonce()) { 
setNonce(other.getNonce()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasClassName()) { - return false; } if (!hasProcId()) { - return false; } if (!hasStartTime()) { - return false; } if (!hasState()) { - return false; } if (!hasLastUpdate()) { - return false; } return true; @@ -1638,7 +1622,7 @@ public final class ProcedureProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -1648,42 +1632,44 @@ public final class ProcedureProtos { } private int bitField0_; - // required string class_name = 1; private java.lang.Object className_ = ""; /** - * required string class_name = 1; - * *
        * internal "static" state
        * 
+ * + * required string class_name = 1; */ public boolean hasClassName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required string class_name = 1; - * *
        * internal "static" state
        * 
+ * + * required string class_name = 1; */ public java.lang.String getClassName() { java.lang.Object ref = className_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - className_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + className_ = s; + } return s; } else { return (java.lang.String) ref; } } /** - * required string class_name = 1; - * *
        * internal "static" state
        * 
+ * + * required string class_name = 1; */ public com.google.protobuf.ByteString getClassNameBytes() { @@ -1699,11 +1685,11 @@ public final class ProcedureProtos { } } /** - * required string class_name = 1; - * *
        * internal "static" state
        * 
+ * + * required string class_name = 1; */ public Builder setClassName( java.lang.String value) { @@ -1716,11 +1702,11 @@ public final class ProcedureProtos { return this; } /** - * required string class_name = 1; - * *
        * internal "static" state
        * 
+ * + * required string class_name = 1; */ public Builder clearClassName() { bitField0_ = (bitField0_ & ~0x00000001); @@ -1729,11 +1715,11 @@ public final class ProcedureProtos { return this; } /** - * required string class_name = 1; - * *
        * internal "static" state
        * 
+ * + * required string class_name = 1; */ public Builder setClassNameBytes( com.google.protobuf.ByteString value) { @@ -1746,34 +1732,33 @@ public final class ProcedureProtos { return this; } - // optional uint64 parent_id = 2; private long parentId_ ; /** - * optional uint64 parent_id = 2; - * *
        * parent if not a root-procedure otherwise not set
        * 
+ * + * optional uint64 parent_id = 2; */ public boolean hasParentId() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional uint64 parent_id = 2; - * *
        * parent if not a root-procedure otherwise not set
        * 
+ * + * optional uint64 parent_id = 2; */ public long getParentId() { return parentId_; } /** - * optional uint64 parent_id = 2; - * *
        * parent if not a root-procedure otherwise not set
        * 
+ * + * optional uint64 parent_id = 2; */ public Builder setParentId(long value) { bitField0_ |= 0x00000002; @@ -1782,11 +1767,11 @@ public final class ProcedureProtos { return this; } /** - * optional uint64 parent_id = 2; - * *
        * parent if not a root-procedure otherwise not set
        * 
+ * + * optional uint64 parent_id = 2; */ public Builder clearParentId() { bitField0_ = (bitField0_ & ~0x00000002); @@ -1795,7 +1780,6 @@ public final class ProcedureProtos { return this; } - // required uint64 proc_id = 3; private long procId_ ; /** * required uint64 proc_id = 3; @@ -1828,7 +1812,6 @@ public final class ProcedureProtos { return this; } - // required uint64 start_time = 4; private long startTime_ ; /** * required uint64 start_time = 4; @@ -1861,7 +1844,6 @@ public final class ProcedureProtos { return this; } - // optional string owner = 5; private java.lang.Object owner_ = ""; /** * optional string owner = 5; @@ -1875,9 +1857,12 @@ public final class ProcedureProtos { public java.lang.String getOwner() { java.lang.Object ref = owner_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - owner_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + owner_ = s; + } return s; } else { return (java.lang.String) ref; @@ -1935,59 +1920,58 @@ public final class ProcedureProtos { return this; } - // required .hbase.pb.ProcedureState state = 6; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState.INITIALIZING; + private int state_ = 1; /** - * required .hbase.pb.ProcedureState state = 6; - * *
        * internal "runtime" state
        * 
+ * + * required .hbase.pb.ProcedureState state = 6; */ public boolean hasState() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** - * required .hbase.pb.ProcedureState state = 6; - * *
        * internal "runtime" state
        * 
+ * + * required .hbase.pb.ProcedureState state = 6; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState getState() { - return state_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState result = org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState.valueOf(state_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState.INITIALIZING : result; } /** - * required .hbase.pb.ProcedureState state = 6; - * *
        * internal "runtime" state
        * 
+ * + * required .hbase.pb.ProcedureState state = 6; */ public Builder setState(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000020; - state_ = value; + state_ = value.getNumber(); onChanged(); return this; } /** - * required .hbase.pb.ProcedureState state = 6; - * *
        * internal "runtime" state
        * 
+ * + * required .hbase.pb.ProcedureState state = 6; */ public Builder clearState() { bitField0_ = (bitField0_ & ~0x00000020); - state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState.INITIALIZING; + state_ = 1; onChanged(); return this; } - // repeated uint32 stack_id = 7; private java.util.List stackId_ = java.util.Collections.emptyList(); private void ensureStackIdIsMutable() { if (!((bitField0_ & 0x00000040) == 0x00000040)) { @@ -1996,42 +1980,42 @@ public final class ProcedureProtos { } } /** - * repeated uint32 stack_id = 7; - * *
        * stack indices in case the procedure was running
        * 
+ * + * repeated uint32 stack_id = 7; */ public java.util.List getStackIdList() { return java.util.Collections.unmodifiableList(stackId_); } /** - * repeated uint32 stack_id = 7; - * *
        * stack indices in case the procedure was running
        * 
+ * + * repeated uint32 stack_id = 7; */ public int getStackIdCount() { return stackId_.size(); } /** - * repeated uint32 stack_id = 7; - * *
        * stack indices in case the procedure was running
        * 
+ * + * repeated uint32 stack_id = 7; */ public int getStackId(int index) { return stackId_.get(index); } /** - * repeated uint32 stack_id = 7; - * *
        * stack indices in case the procedure was running
        * 
+ * + * repeated uint32 stack_id = 7; */ public Builder setStackId( int index, int value) { @@ -2041,11 +2025,11 @@ public final class ProcedureProtos { return this; } /** - * repeated uint32 stack_id = 7; - * *
        * stack indices in case the procedure was running
        * 
+ * + * repeated uint32 stack_id = 7; */ public Builder addStackId(int value) { ensureStackIdIsMutable(); @@ -2054,25 +2038,26 @@ public final class ProcedureProtos { return this; } /** - * repeated uint32 stack_id = 7; - * *
        * stack indices in case the procedure was running
        * 
+ * + * repeated uint32 stack_id = 7; */ public Builder addAllStackId( java.lang.Iterable values) { ensureStackIdIsMutable(); - super.addAll(values, stackId_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, stackId_); onChanged(); return this; } /** - * repeated uint32 stack_id = 7; - * *
        * stack indices in case the procedure was running
        * 
+ * + * repeated uint32 stack_id = 7; */ public Builder clearStackId() { stackId_ = java.util.Collections.emptyList(); @@ -2081,7 +2066,6 @@ public final class ProcedureProtos { return this; } - // required uint64 last_update = 8; private long lastUpdate_ ; /** * required uint64 last_update = 8; @@ -2114,7 +2098,6 @@ public final class ProcedureProtos { return this; } - // optional uint32 timeout = 9; private int timeout_ ; /** * optional uint32 timeout = 9; @@ -2147,40 +2130,39 @@ public final class ProcedureProtos { return this; } - // optional .hbase.pb.ForeignExceptionMessage exception = 10; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage exception_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage exception_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder> exceptionBuilder_; /** - * optional .hbase.pb.ForeignExceptionMessage exception = 10; - * *
        * user state/results
        * 
+ * + * optional .hbase.pb.ForeignExceptionMessage exception = 10; */ public boolean hasException() { return ((bitField0_ & 0x00000200) == 0x00000200); } /** - * optional .hbase.pb.ForeignExceptionMessage exception = 10; - * *
        * user state/results
        * 
+ * + * optional .hbase.pb.ForeignExceptionMessage exception = 10; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage getException() { if (exceptionBuilder_ == null) { - return exception_; + return exception_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance() : exception_; } else { return exceptionBuilder_.getMessage(); } } /** - * optional .hbase.pb.ForeignExceptionMessage exception = 10; - * *
        * user state/results
        * 
+ * + * optional .hbase.pb.ForeignExceptionMessage exception = 10; */ public Builder setException(org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage value) { if (exceptionBuilder_ == null) { @@ -2196,11 +2178,11 @@ public final class ProcedureProtos { return this; } /** - * optional .hbase.pb.ForeignExceptionMessage exception = 10; - * *
        * user state/results
        * 
+ * + * optional .hbase.pb.ForeignExceptionMessage exception = 10; */ public Builder setException( org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder builderForValue) { @@ -2214,15 +2196,16 @@ public final class ProcedureProtos { return this; } /** - * optional .hbase.pb.ForeignExceptionMessage exception = 10; - * *
        * user state/results
        * 
+ * + * optional .hbase.pb.ForeignExceptionMessage exception = 10; */ public Builder mergeException(org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage value) { if (exceptionBuilder_ == null) { if (((bitField0_ & 0x00000200) == 0x00000200) && + exception_ != null && exception_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance()) { exception_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.newBuilder(exception_).mergeFrom(value).buildPartial(); @@ -2237,15 +2220,15 @@ public final class ProcedureProtos { return this; } /** - * optional .hbase.pb.ForeignExceptionMessage exception = 10; - * *
        * user state/results
        * 
+ * + * optional .hbase.pb.ForeignExceptionMessage exception = 10; */ public Builder clearException() { if (exceptionBuilder_ == null) { - exception_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance(); + exception_ = null; onChanged(); } else { exceptionBuilder_.clear(); @@ -2254,11 +2237,11 @@ public final class ProcedureProtos { return this; } /** - * optional .hbase.pb.ForeignExceptionMessage exception = 10; - * *
        * user state/results
        * 
+ * + * optional .hbase.pb.ForeignExceptionMessage exception = 10; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder getExceptionBuilder() { bitField0_ |= 0x00000200; @@ -2266,33 +2249,34 @@ public final class ProcedureProtos { return getExceptionFieldBuilder().getBuilder(); } /** - * optional .hbase.pb.ForeignExceptionMessage exception = 10; - * *
        * user state/results
        * 
+ * + * optional .hbase.pb.ForeignExceptionMessage exception = 10; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder getExceptionOrBuilder() { if (exceptionBuilder_ != null) { return exceptionBuilder_.getMessageOrBuilder(); } else { - return exception_; + return exception_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance() : exception_; } } /** - * optional .hbase.pb.ForeignExceptionMessage exception = 10; - * *
        * user state/results
        * 
+ * + * optional .hbase.pb.ForeignExceptionMessage exception = 10; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder> getExceptionFieldBuilder() { if (exceptionBuilder_ == null) { - exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder>( - exception_, + getException(), getParentForChildren(), isClean()); exception_ = null; @@ -2300,34 +2284,33 @@ public final class ProcedureProtos { return exceptionBuilder_; } - // optional bytes result = 11; private com.google.protobuf.ByteString result_ = com.google.protobuf.ByteString.EMPTY; /** - * optional bytes result = 11; - * *
        * opaque (user) result structure
        * 
+ * + * optional bytes result = 11; */ public boolean hasResult() { return ((bitField0_ & 0x00000400) == 0x00000400); } /** - * optional bytes result = 11; - * *
        * opaque (user) result structure
        * 
+ * + * optional bytes result = 11; */ public com.google.protobuf.ByteString getResult() { return result_; } /** - * optional bytes result = 11; - * *
        * opaque (user) result structure
        * 
+ * + * optional bytes result = 11; */ public Builder setResult(com.google.protobuf.ByteString value) { if (value == null) { @@ -2339,11 +2322,11 @@ public final class ProcedureProtos { return this; } /** - * optional bytes result = 11; - * *
        * opaque (user) result structure
        * 
+ * + * optional bytes result = 11; */ public Builder clearResult() { bitField0_ = (bitField0_ & ~0x00000400); @@ -2352,34 +2335,33 @@ public final class ProcedureProtos { return this; } - // optional bytes state_data = 12; private com.google.protobuf.ByteString stateData_ = com.google.protobuf.ByteString.EMPTY; /** - * optional bytes state_data = 12; - * *
        * opaque (user) procedure internal-state
        * 
+ * + * optional bytes state_data = 12; */ public boolean hasStateData() { return ((bitField0_ & 0x00000800) == 0x00000800); } /** - * optional bytes state_data = 12; - * *
        * opaque (user) procedure internal-state
        * 
+ * + * optional bytes state_data = 12; */ public com.google.protobuf.ByteString getStateData() { return stateData_; } /** - * optional bytes state_data = 12; - * *
        * opaque (user) procedure internal-state
        * 
+ * + * optional bytes state_data = 12; */ public Builder setStateData(com.google.protobuf.ByteString value) { if (value == null) { @@ -2391,11 +2373,11 @@ public final class ProcedureProtos { return this; } /** - * optional bytes state_data = 12; - * *
        * opaque (user) procedure internal-state
        * 
+ * + * optional bytes state_data = 12; */ public Builder clearStateData() { bitField0_ = (bitField0_ & ~0x00000800); @@ -2404,34 +2386,33 @@ public final class ProcedureProtos { return this; } - // optional uint64 nonce_group = 13 [default = 0]; private long nonceGroup_ ; /** - * optional uint64 nonce_group = 13 [default = 0]; - * *
        * Nonce to prevent same procedure submit by multiple times
        * 
+ * + * optional uint64 nonce_group = 13 [default = 0]; */ public boolean hasNonceGroup() { return ((bitField0_ & 0x00001000) == 0x00001000); } /** - * optional uint64 nonce_group = 13 [default = 0]; - * *
        * Nonce to prevent same procedure submit by multiple times
        * 
+ * + * optional uint64 nonce_group = 13 [default = 0]; */ public long getNonceGroup() { return nonceGroup_; } /** - * optional uint64 nonce_group = 13 [default = 0]; - * *
        * Nonce to prevent same procedure submit by multiple times
        * 
+ * + * optional uint64 nonce_group = 13 [default = 0]; */ public Builder setNonceGroup(long value) { bitField0_ |= 0x00001000; @@ -2440,11 +2421,11 @@ public final class ProcedureProtos { return this; } /** - * optional uint64 nonce_group = 13 [default = 0]; - * *
        * Nonce to prevent same procedure submit by multiple times
        * 
+ * + * optional uint64 nonce_group = 13 [default = 0]; */ public Builder clearNonceGroup() { bitField0_ = (bitField0_ & ~0x00001000); @@ -2453,7 +2434,6 @@ public final class ProcedureProtos { return this; } - // optional uint64 nonce = 14 [default = 0]; private long nonce_ ; /** * optional uint64 nonce = 14 [default = 0]; @@ -2485,22 +2465,59 @@ public final class ProcedureProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:hbase.pb.Procedure) + } + + // @@protoc_insertion_point(class_scope:hbase.pb.Procedure) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public Procedure parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Procedure(input, extensionRegistry); + } + }; - // @@protoc_insertion_point(builder_scope:hbase.pb.Procedure) + public static com.google.protobuf.Parser parser() { + return PARSER; } - static { - defaultInstance = new Procedure(true); - defaultInstance.initFields(); + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public 
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.Procedure) } - public interface SequentialProcedureDataOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface SequentialProcedureDataOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SequentialProcedureData) + com.google.protobuf.MessageOrBuilder { - // required bool executed = 1; /** * required bool executed = 1; */ @@ -2511,43 +2528,35 @@ public final class ProcedureProtos { boolean getExecuted(); } /** - * Protobuf type {@code hbase.pb.SequentialProcedureData} - * *
    **
    * SequentialProcedure data
    * 
+ * + * Protobuf type {@code hbase.pb.SequentialProcedureData} */ - public static final class SequentialProcedureData extends - com.google.protobuf.GeneratedMessage - implements SequentialProcedureDataOrBuilder { + public static final class SequentialProcedureData extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.SequentialProcedureData) + SequentialProcedureDataOrBuilder { // Use SequentialProcedureData.newBuilder() to construct. - private SequentialProcedureData(com.google.protobuf.GeneratedMessage.Builder builder) { + private SequentialProcedureData(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private SequentialProcedureData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SequentialProcedureData defaultInstance; - public static SequentialProcedureData getDefaultInstance() { - return defaultInstance; } - - public SequentialProcedureData getDefaultInstanceForType() { - return defaultInstance; + private SequentialProcedureData() { + executed_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private SequentialProcedureData( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -2577,7 +2586,7 @@ public final class ProcedureProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - 
e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -2588,30 +2597,14 @@ public final class ProcedureProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_SequentialProcedureData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_SequentialProcedureData_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SequentialProcedureData parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SequentialProcedureData(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bool executed = 1; public static final int EXECUTED_FIELD_NUMBER = 1; private boolean executed_; /** @@ -2627,13 +2620,11 @@ public final class ProcedureProtos { return executed_; } - private void initFields() { - executed_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasExecuted()) { memoizedIsInitialized = 0; @@ -2645,16 
+2636,14 @@ public final class ProcedureProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, executed_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -2662,19 +2651,13 @@ public final class ProcedureProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, executed_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -2690,12 +2673,10 @@ public final class ProcedureProtos { result = result && (getExecuted() == other.getExecuted()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -2705,9 +2686,10 @@ public final class ProcedureProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasExecuted()) { hash = (37 * hash) + EXECUTED_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getExecuted()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getExecuted()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -2735,66 +2717,78 @@ public final class 
ProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.SequentialProcedureData} - * *
      **
      * SequentialProcedure data
      * 
+ * + * Protobuf type {@code hbase.pb.SequentialProcedureData} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureDataOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SequentialProcedureData) + org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureDataOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_SequentialProcedureData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_SequentialProcedureData_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -2807,18 +2801,15 @@ public final class ProcedureProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); executed_ = false; @@ -2826,10 +2817,6 @@ public final class ProcedureProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_SequentialProcedureData_descriptor; @@ -2860,6 +2847,32 @@ public final class ProcedureProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData)other); @@ -2874,13 +2887,13 @@ public final class ProcedureProtos { if (other.hasExecuted()) { setExecuted(other.getExecuted()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasExecuted()) { - return false; } return true; @@ -2895,7 +2908,7 @@ public final class ProcedureProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData) 
e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -2905,7 +2918,6 @@ public final class ProcedureProtos { } private int bitField0_; - // required bool executed = 1; private boolean executed_ ; /** * required bool executed = 1; @@ -2937,22 +2949,59 @@ public final class ProcedureProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.SequentialProcedureData) } + // @@protoc_insertion_point(class_scope:hbase.pb.SequentialProcedureData) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData DEFAULT_INSTANCE; static { - defaultInstance = new SequentialProcedureData(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public SequentialProcedureData parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SequentialProcedureData(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public 
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.SequentialProcedureData) } - public interface StateMachineProcedureDataOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface StateMachineProcedureDataOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.StateMachineProcedureData) + com.google.protobuf.MessageOrBuilder { - // repeated uint32 state = 1; /** * repeated uint32 state = 1; */ @@ -2967,43 +3016,35 @@ public final class ProcedureProtos { int getState(int index); } /** - * Protobuf type {@code hbase.pb.StateMachineProcedureData} - * *
    **
    * StateMachineProcedure data
    * 
+ * + * Protobuf type {@code hbase.pb.StateMachineProcedureData} */ - public static final class StateMachineProcedureData extends - com.google.protobuf.GeneratedMessage - implements StateMachineProcedureDataOrBuilder { + public static final class StateMachineProcedureData extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.StateMachineProcedureData) + StateMachineProcedureDataOrBuilder { // Use StateMachineProcedureData.newBuilder() to construct. - private StateMachineProcedureData(com.google.protobuf.GeneratedMessage.Builder builder) { + private StateMachineProcedureData(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private StateMachineProcedureData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final StateMachineProcedureData defaultInstance; - public static StateMachineProcedureData getDefaultInstance() { - return defaultInstance; - } - - public StateMachineProcedureData getDefaultInstanceForType() { - return defaultInstance; + private StateMachineProcedureData() { + state_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private StateMachineProcedureData( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -3049,7 +3090,7 @@ public final class ProcedureProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { state_ = java.util.Collections.unmodifiableList(state_); @@ -3063,29 +3104,13 @@ public final class ProcedureProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_StateMachineProcedureData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_StateMachineProcedureData_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public StateMachineProcedureData parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new StateMachineProcedureData(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - // repeated uint32 state = 1; public static final int STATE_FIELD_NUMBER = 1; private java.util.List state_; /** @@ -3108,13 +3133,11 @@ public final class ProcedureProtos { return state_.get(index); } - private void initFields() { - state_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 
1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -3122,16 +3145,14 @@ public final class ProcedureProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < state_.size(); i++) { output.writeUInt32(1, state_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -3144,19 +3165,13 @@ public final class ProcedureProtos { size += dataSize; size += 1 * getStateList().size(); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -3169,12 +3184,10 @@ public final class ProcedureProtos { boolean result = true; result = result && getStateList() .equals(other.getStateList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -3186,7 +3199,7 @@ public final class ProcedureProtos { hash = (37 * hash) + STATE_FIELD_NUMBER; hash = (53 * hash) + getStateList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -3214,66 +3227,78 @@ public final class ProcedureProtos { } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return 
PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.StateMachineProcedureData} - * *
      **
      * StateMachineProcedure data
      * 
+ * + * Protobuf type {@code hbase.pb.StateMachineProcedureData} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureDataOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.StateMachineProcedureData) + org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureDataOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_StateMachineProcedureData_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_StateMachineProcedureData_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -3286,18 +3311,15 @@ public final class ProcedureProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); state_ = java.util.Collections.emptyList(); @@ -3305,10 +3327,6 @@ public final class ProcedureProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_StateMachineProcedureData_descriptor; @@ -3338,6 +3356,32 @@ public final class ProcedureProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData)other); @@ -3359,7 +3403,8 @@ public final class ProcedureProtos { } onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -3376,7 +3421,7 @@ public final class ProcedureProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ 
-3386,7 +3431,6 @@ public final class ProcedureProtos { } private int bitField0_; - // repeated uint32 state = 1; private java.util.List state_ = java.util.Collections.emptyList(); private void ensureStateIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { @@ -3438,7 +3482,8 @@ public final class ProcedureProtos { public Builder addAllState( java.lang.Iterable values) { ensureStateIsMutable(); - super.addAll(values, state_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, state_); onChanged(); return this; } @@ -3451,22 +3496,59 @@ public final class ProcedureProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.StateMachineProcedureData) } + // @@protoc_insertion_point(class_scope:hbase.pb.StateMachineProcedureData) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData DEFAULT_INSTANCE; static { - defaultInstance = new StateMachineProcedureData(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public StateMachineProcedureData parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new 
StateMachineProcedureData(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.StateMachineProcedureData) } - public interface ProcedureWALHeaderOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ProcedureWALHeaderOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ProcedureWALHeader) + com.google.protobuf.MessageOrBuilder { - // required uint32 version = 1; /** * required uint32 version = 1; */ @@ -3476,7 +3558,6 @@ public final class ProcedureProtos { */ int getVersion(); - // required uint32 type = 2; /** * required uint32 type = 2; */ @@ -3486,7 +3567,6 @@ public final class ProcedureProtos { */ int getType(); - // required uint64 log_id = 3; /** * required uint64 log_id = 3; */ @@ -3496,7 +3576,6 @@ public final class ProcedureProtos { */ long getLogId(); - // required uint64 min_proc_id = 4; /** * required uint64 min_proc_id = 4; */ @@ -3507,43 +3586,38 @@ public final class ProcedureProtos { long getMinProcId(); } /** - * Protobuf type {@code hbase.pb.ProcedureWALHeader} - * *
    **
    * Procedure WAL header
    * 
+ * + * Protobuf type {@code hbase.pb.ProcedureWALHeader} */ - public static final class ProcedureWALHeader extends - com.google.protobuf.GeneratedMessage - implements ProcedureWALHeaderOrBuilder { + public static final class ProcedureWALHeader extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ProcedureWALHeader) + ProcedureWALHeaderOrBuilder { // Use ProcedureWALHeader.newBuilder() to construct. - private ProcedureWALHeader(com.google.protobuf.GeneratedMessage.Builder builder) { + private ProcedureWALHeader(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ProcedureWALHeader(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ProcedureWALHeader defaultInstance; - public static ProcedureWALHeader getDefaultInstance() { - return defaultInstance; } - - public ProcedureWALHeader getDefaultInstanceForType() { - return defaultInstance; + private ProcedureWALHeader() { + version_ = 0; + type_ = 0; + logId_ = 0L; + minProcId_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ProcedureWALHeader( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -3588,7 +3662,7 @@ public final class ProcedureProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + 
e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -3599,30 +3673,14 @@ public final class ProcedureProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALHeader_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALHeader_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ProcedureWALHeader parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ProcedureWALHeader(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required uint32 version = 1; public static final int VERSION_FIELD_NUMBER = 1; private int version_; /** @@ -3638,7 +3696,6 @@ public final class ProcedureProtos { return version_; } - // required uint32 type = 2; public static final int TYPE_FIELD_NUMBER = 2; private int type_; /** @@ -3654,7 +3711,6 @@ public final class ProcedureProtos { return type_; } - // required uint64 log_id = 3; public static final int LOG_ID_FIELD_NUMBER = 3; private long logId_; /** @@ -3670,7 +3726,6 @@ public final class ProcedureProtos { return logId_; } - // required uint64 min_proc_id = 4; public static final int MIN_PROC_ID_FIELD_NUMBER = 4; private 
long minProcId_; /** @@ -3686,16 +3741,11 @@ public final class ProcedureProtos { return minProcId_; } - private void initFields() { - version_ = 0; - type_ = 0; - logId_ = 0L; - minProcId_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasVersion()) { memoizedIsInitialized = 0; @@ -3719,7 +3769,6 @@ public final class ProcedureProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt32(1, version_); } @@ -3732,12 +3781,11 @@ public final class ProcedureProtos { if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeUInt64(4, minProcId_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -3757,19 +3805,13 @@ public final class ProcedureProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(4, minProcId_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -3800,12 +3842,10 @@ public final class ProcedureProtos { result = result && (getMinProcId() == other.getMinProcId()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && 
unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -3823,13 +3863,15 @@ public final class ProcedureProtos { } if (hasLogId()) { hash = (37 * hash) + LOG_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLogId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getLogId()); } if (hasMinProcId()) { hash = (37 * hash) + MIN_PROC_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getMinProcId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getMinProcId()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -3857,66 +3899,78 @@ public final class ProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader parseDelimitedFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.ProcedureWALHeader} - * *
      **
      * Procedure WAL header
      * 
+ * + * Protobuf type {@code hbase.pb.ProcedureWALHeader} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeaderOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ProcedureWALHeader) + org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeaderOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALHeader_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALHeader_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -3929,18 +3983,15 @@ public final class ProcedureProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); version_ = 0; @@ -3954,10 +4005,6 @@ public final class ProcedureProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALHeader_descriptor; @@ -4000,6 
+4047,32 @@ public final class ProcedureProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader)other); @@ -4023,25 +4096,22 @@ public final class ProcedureProtos { if (other.hasMinProcId()) { setMinProcId(other.getMinProcId()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasVersion()) { - return false; } if (!hasType()) { - return false; } if (!hasLogId()) { - return false; } if (!hasMinProcId()) { - return false; } return true; @@ -4056,7 +4126,7 @@ public final class ProcedureProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader) e.getUnfinishedMessage(); - throw e; + 
throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -4066,7 +4136,6 @@ public final class ProcedureProtos { } private int bitField0_; - // required uint32 version = 1; private int version_ ; /** * required uint32 version = 1; @@ -4099,7 +4168,6 @@ public final class ProcedureProtos { return this; } - // required uint32 type = 2; private int type_ ; /** * required uint32 type = 2; @@ -4132,7 +4200,6 @@ public final class ProcedureProtos { return this; } - // required uint64 log_id = 3; private long logId_ ; /** * required uint64 log_id = 3; @@ -4165,7 +4232,6 @@ public final class ProcedureProtos { return this; } - // required uint64 min_proc_id = 4; private long minProcId_ ; /** * required uint64 min_proc_id = 4; @@ -4197,22 +4263,59 @@ public final class ProcedureProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ProcedureWALHeader) } + // @@protoc_insertion_point(class_scope:hbase.pb.ProcedureWALHeader) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader DEFAULT_INSTANCE; static { - defaultInstance = new ProcedureWALHeader(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ProcedureWALHeader parsePartialFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ProcedureWALHeader(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ProcedureWALHeader) } - public interface ProcedureWALTrailerOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ProcedureWALTrailerOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ProcedureWALTrailer) + com.google.protobuf.MessageOrBuilder { - // required uint32 version = 1; /** * required uint32 version = 1; */ @@ -4222,7 +4325,6 @@ public final class ProcedureProtos { */ int getVersion(); - // required uint64 tracker_pos = 2; /** * required uint64 tracker_pos = 2; */ @@ -4233,43 +4335,36 @@ public final class ProcedureProtos { long getTrackerPos(); } /** - * Protobuf type {@code hbase.pb.ProcedureWALTrailer} - * *
    **
    * Procedure WAL trailer
    * 
+ * + * Protobuf type {@code hbase.pb.ProcedureWALTrailer} */ - public static final class ProcedureWALTrailer extends - com.google.protobuf.GeneratedMessage - implements ProcedureWALTrailerOrBuilder { + public static final class ProcedureWALTrailer extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ProcedureWALTrailer) + ProcedureWALTrailerOrBuilder { // Use ProcedureWALTrailer.newBuilder() to construct. - private ProcedureWALTrailer(com.google.protobuf.GeneratedMessage.Builder builder) { + private ProcedureWALTrailer(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private ProcedureWALTrailer(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ProcedureWALTrailer defaultInstance; - public static ProcedureWALTrailer getDefaultInstance() { - return defaultInstance; - } - - public ProcedureWALTrailer getDefaultInstanceForType() { - return defaultInstance; + private ProcedureWALTrailer() { + version_ = 0; + trackerPos_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ProcedureWALTrailer( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -4304,7 +4399,7 @@ public final class ProcedureProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } 
finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -4315,30 +4410,14 @@ public final class ProcedureProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALTrailer_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALTrailer_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ProcedureWALTrailer parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ProcedureWALTrailer(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; + return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALTrailer_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer.Builder.class); } private int bitField0_; - // required uint32 version = 1; public static final int VERSION_FIELD_NUMBER = 1; private int version_; /** @@ -4354,7 +4433,6 @@ public final class ProcedureProtos { return version_; } - // required uint64 tracker_pos = 2; public static final int TRACKER_POS_FIELD_NUMBER = 2; private long 
trackerPos_; /** @@ -4370,14 +4448,11 @@ public final class ProcedureProtos { return trackerPos_; } - private void initFields() { - version_ = 0; - trackerPos_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasVersion()) { memoizedIsInitialized = 0; @@ -4393,19 +4468,17 @@ public final class ProcedureProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt32(1, version_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt64(2, trackerPos_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -4417,19 +4490,13 @@ public final class ProcedureProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(2, trackerPos_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -4450,12 +4517,10 @@ public final class ProcedureProtos { result = result && (getTrackerPos() == other.getTrackerPos()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode 
= 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -4469,9 +4534,10 @@ public final class ProcedureProtos { } if (hasTrackerPos()) { hash = (37 * hash) + TRACKER_POS_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getTrackerPos()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getTrackerPos()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -4499,66 +4565,78 @@ public final class ProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.ProcedureWALTrailer} - * *
      **
      * Procedure WAL trailer
      * 
+ * + * Protobuf type {@code hbase.pb.ProcedureWALTrailer} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailerOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ProcedureWALTrailer) + org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailerOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALTrailer_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALTrailer_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -4571,18 +4649,15 @@ public final class ProcedureProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); version_ = 0; @@ -4592,10 +4667,6 @@ public final class ProcedureProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALTrailer_descriptor; @@ -4630,6 
+4701,32 @@ public final class ProcedureProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer)other); @@ -4647,17 +4744,16 @@ public final class ProcedureProtos { if (other.hasTrackerPos()) { setTrackerPos(other.getTrackerPos()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasVersion()) { - return false; } if (!hasTrackerPos()) { - return false; } return true; @@ -4672,7 +4768,7 @@ public final class ProcedureProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { 
mergeFrom(parsedMessage); @@ -4682,7 +4778,6 @@ public final class ProcedureProtos { } private int bitField0_; - // required uint32 version = 1; private int version_ ; /** * required uint32 version = 1; @@ -4715,7 +4810,6 @@ public final class ProcedureProtos { return this; } - // required uint64 tracker_pos = 2; private long trackerPos_ ; /** * required uint64 tracker_pos = 2; @@ -4747,22 +4841,59 @@ public final class ProcedureProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ProcedureWALTrailer) } + // @@protoc_insertion_point(class_scope:hbase.pb.ProcedureWALTrailer) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer DEFAULT_INSTANCE; static { - defaultInstance = new ProcedureWALTrailer(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ProcedureWALTrailer parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ProcedureWALTrailer(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { 
+ return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ProcedureWALTrailer) } - public interface ProcedureStoreTrackerOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ProcedureStoreTrackerOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ProcedureStoreTracker) + com.google.protobuf.MessageOrBuilder { - // repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; /** * repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; */ @@ -4790,36 +4921,28 @@ public final class ProcedureProtos { /** * Protobuf type {@code hbase.pb.ProcedureStoreTracker} */ - public static final class ProcedureStoreTracker extends - com.google.protobuf.GeneratedMessage - implements ProcedureStoreTrackerOrBuilder { + public static final class ProcedureStoreTracker extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ProcedureStoreTracker) + ProcedureStoreTrackerOrBuilder { // Use ProcedureStoreTracker.newBuilder() to construct. 
- private ProcedureStoreTracker(com.google.protobuf.GeneratedMessage.Builder builder) { + private ProcedureStoreTracker(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ProcedureStoreTracker(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ProcedureStoreTracker defaultInstance; - public static ProcedureStoreTracker getDefaultInstance() { - return defaultInstance; } - - public ProcedureStoreTracker getDefaultInstanceForType() { - return defaultInstance; + private ProcedureStoreTracker() { + node_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ProcedureStoreTracker( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -4843,7 +4966,8 @@ public final class ProcedureProtos { node_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } - node_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode.PARSER, extensionRegistry)); + node_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode.PARSER, extensionRegistry)); break; } } @@ -4852,7 +4976,7 @@ public final class ProcedureProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + 
e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { node_ = java.util.Collections.unmodifiableList(node_); @@ -4866,32 +4990,17 @@ public final class ProcedureProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureStoreTracker_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureStoreTracker_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ProcedureStoreTracker parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ProcedureStoreTracker(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public interface TrackerNodeOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface TrackerNodeOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ProcedureStoreTracker.TrackerNode) + com.google.protobuf.MessageOrBuilder { - // required uint64 start_id = 1; /** * required uint64 start_id = 1; */ @@ -4901,7 +5010,6 @@ public final class ProcedureProtos { */ long getStartId(); - // repeated uint64 updated = 2; /** * repeated uint64 updated = 2; */ @@ -4915,7 +5023,6 @@ public final class ProcedureProtos { */ long getUpdated(int index); - // repeated uint64 
deleted = 3; /** * repeated uint64 deleted = 3; */ @@ -4932,36 +5039,30 @@ public final class ProcedureProtos { /** * Protobuf type {@code hbase.pb.ProcedureStoreTracker.TrackerNode} */ - public static final class TrackerNode extends - com.google.protobuf.GeneratedMessage - implements TrackerNodeOrBuilder { + public static final class TrackerNode extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ProcedureStoreTracker.TrackerNode) + TrackerNodeOrBuilder { // Use TrackerNode.newBuilder() to construct. - private TrackerNode(com.google.protobuf.GeneratedMessage.Builder builder) { + private TrackerNode(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private TrackerNode(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final TrackerNode defaultInstance; - public static TrackerNode getDefaultInstance() { - return defaultInstance; } - - public TrackerNode getDefaultInstanceForType() { - return defaultInstance; + private TrackerNode() { + startId_ = 0L; + updated_ = java.util.Collections.emptyList(); + deleted_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private TrackerNode( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -5033,7 +5134,7 @@ public final class ProcedureProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { updated_ = java.util.Collections.unmodifiableList(updated_); @@ -5050,30 +5151,14 @@ public final class ProcedureProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureStoreTracker_TrackerNode_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureStoreTracker_TrackerNode_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public TrackerNode parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new TrackerNode(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required uint64 start_id = 1; public static final int START_ID_FIELD_NUMBER = 1; private long startId_; /** @@ -5089,7 +5174,6 @@ public final class ProcedureProtos { return startId_; } - // repeated uint64 updated = 2; public static final int UPDATED_FIELD_NUMBER = 2; private java.util.List updated_; /** @@ -5112,7 +5196,6 @@ public final class ProcedureProtos { return updated_.get(index); } - // repeated uint64 deleted = 3; public static 
final int DELETED_FIELD_NUMBER = 3; private java.util.List deleted_; /** @@ -5135,15 +5218,11 @@ public final class ProcedureProtos { return deleted_.get(index); } - private void initFields() { - startId_ = 0L; - updated_ = java.util.Collections.emptyList(); - deleted_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasStartId()) { memoizedIsInitialized = 0; @@ -5155,7 +5234,6 @@ public final class ProcedureProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, startId_); } @@ -5165,12 +5243,11 @@ public final class ProcedureProtos { for (int i = 0; i < deleted_.size(); i++) { output.writeUInt64(3, deleted_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -5196,19 +5273,13 @@ public final class ProcedureProtos { size += dataSize; size += 1 * getDeletedList().size(); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -5228,12 +5299,10 @@ public final class ProcedureProtos { .equals(other.getUpdatedList()); result = result && getDeletedList() 
.equals(other.getDeletedList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -5243,7 +5312,8 @@ public final class ProcedureProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasStartId()) { hash = (37 * hash) + START_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getStartId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getStartId()); } if (getUpdatedCount() > 0) { hash = (37 * hash) + UPDATED_FIELD_NUMBER; @@ -5253,7 +5323,7 @@ public final class ProcedureProtos { hash = (37 * hash) + DELETED_FIELD_NUMBER; hash = (53 * hash) + getDeletedList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -5281,46 +5351,57 @@ public final class ProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return 
com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -5328,14 +5409,15 @@ public final class ProcedureProtos { * Protobuf type {@code hbase.pb.ProcedureStoreTracker.TrackerNode} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNodeOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ProcedureStoreTracker.TrackerNode) + org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNodeOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureStoreTracker_TrackerNode_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureStoreTracker_TrackerNode_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -5348,18 +5430,15 @@ public final class ProcedureProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } 
- private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); startId_ = 0L; @@ -5371,10 +5450,6 @@ public final class ProcedureProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureStoreTracker_TrackerNode_descriptor; @@ -5415,6 +5490,32 @@ public final class ProcedureProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode)other); @@ -5449,13 +5550,13 @@ public final class ProcedureProtos { } onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if 
(!hasStartId()) { - return false; } return true; @@ -5470,7 +5571,7 @@ public final class ProcedureProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -5480,7 +5581,6 @@ public final class ProcedureProtos { } private int bitField0_; - // required uint64 start_id = 1; private long startId_ ; /** * required uint64 start_id = 1; @@ -5513,7 +5613,6 @@ public final class ProcedureProtos { return this; } - // repeated uint64 updated = 2; private java.util.List updated_ = java.util.Collections.emptyList(); private void ensureUpdatedIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { @@ -5565,7 +5664,8 @@ public final class ProcedureProtos { public Builder addAllUpdated( java.lang.Iterable values) { ensureUpdatedIsMutable(); - super.addAll(values, updated_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, updated_); onChanged(); return this; } @@ -5579,7 +5679,6 @@ public final class ProcedureProtos { return this; } - // repeated uint64 deleted = 3; private java.util.List deleted_ = java.util.Collections.emptyList(); private void ensureDeletedIsMutable() { if (!((bitField0_ & 0x00000004) == 0x00000004)) { @@ -5631,7 +5730,8 @@ public final class ProcedureProtos { public Builder addAllDeleted( java.lang.Iterable values) { ensureDeletedIsMutable(); - super.addAll(values, deleted_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, deleted_); onChanged(); return this; } @@ -5644,19 +5744,55 @@ public final class ProcedureProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return 
super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ProcedureStoreTracker.TrackerNode) } + // @@protoc_insertion_point(class_scope:hbase.pb.ProcedureStoreTracker.TrackerNode) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode DEFAULT_INSTANCE; static { - defaultInstance = new TrackerNode(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public TrackerNode parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TrackerNode(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ProcedureStoreTracker.TrackerNode) } - // repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; public static final int NODE_FIELD_NUMBER = 1; private java.util.List node_; /** @@ -5692,13 +5828,11 @@ public final class ProcedureProtos { return node_.get(index); } - private void 
initFields() { - node_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; for (int i = 0; i < getNodeCount(); i++) { if (!getNode(i).isInitialized()) { @@ -5712,16 +5846,14 @@ public final class ProcedureProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < node_.size(); i++) { output.writeMessage(1, node_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -5729,19 +5861,13 @@ public final class ProcedureProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, node_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -5754,12 +5880,10 @@ public final class ProcedureProtos { boolean result = true; result = result && getNodeList() .equals(other.getNodeList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -5771,7 +5895,7 @@ public final class ProcedureProtos { hash = 
(37 * hash) + NODE_FIELD_NUMBER; hash = (53 * hash) + getNodeList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -5799,46 +5923,57 @@ public final class ProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -5846,14 +5981,15 @@ public final class ProcedureProtos { * Protobuf type {@code hbase.pb.ProcedureStoreTracker} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTrackerOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ProcedureStoreTracker) + org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTrackerOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureStoreTracker_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureStoreTracker_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -5866,19 +6002,16 @@ public final class ProcedureProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getNodeFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (nodeBuilder_ == null) { @@ -5890,10 +6023,6 @@ public final class ProcedureProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureStoreTracker_descriptor; @@ -5927,6 +6056,32 @@ public final class ProcedureProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) 
super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker)other); @@ -5957,21 +6112,21 @@ public final class ProcedureProtos { node_ = other.node_; bitField0_ = (bitField0_ & ~0x00000001); nodeBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getNodeFieldBuilder() : null; } else { nodeBuilder_.addAllMessages(other.node_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getNodeCount(); i++) { if (!getNode(i).isInitialized()) { - return false; } } @@ -5987,7 +6142,7 @@ public final class ProcedureProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -5997,7 +6152,6 @@ public final class ProcedureProtos { } private int bitField0_; - // repeated .hbase.pb.ProcedureStoreTracker.TrackerNode node = 1; private java.util.List node_ = java.util.Collections.emptyList(); private void ensureNodeIsMutable() { @@ -6007,7 
+6161,7 @@ public final class ProcedureProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNodeOrBuilder> nodeBuilder_; /** @@ -6139,7 +6293,8 @@ public final class ProcedureProtos { java.lang.Iterable values) { if (nodeBuilder_ == null) { ensureNodeIsMutable(); - super.addAll(values, node_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, node_); onChanged(); } else { nodeBuilder_.addAllMessages(values); @@ -6222,11 +6377,11 @@ public final class ProcedureProtos { getNodeBuilderList() { return getNodeFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNodeOrBuilder> getNodeFieldBuilder() { if (nodeBuilder_ == null) { - nodeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + nodeBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNodeOrBuilder>( node_, ((bitField0_ & 0x00000001) == 0x00000001), @@ -6236,22 +6391,59 @@ public final class ProcedureProtos { } return nodeBuilder_; } + 
public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ProcedureStoreTracker) } + // @@protoc_insertion_point(class_scope:hbase.pb.ProcedureStoreTracker) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker DEFAULT_INSTANCE; static { - defaultInstance = new ProcedureStoreTracker(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ProcedureStoreTracker parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ProcedureStoreTracker(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ProcedureStoreTracker) } - public interface ProcedureWALEntryOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ProcedureWALEntryOrBuilder extends + // 
@@protoc_insertion_point(interface_extends:hbase.pb.ProcedureWALEntry) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.ProcedureWALEntry.Type type = 1; /** * required .hbase.pb.ProcedureWALEntry.Type type = 1; */ @@ -6261,7 +6453,6 @@ public final class ProcedureProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type getType(); - // repeated .hbase.pb.Procedure procedure = 2; /** * repeated .hbase.pb.Procedure procedure = 2; */ @@ -6286,7 +6477,6 @@ public final class ProcedureProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder getProcedureOrBuilder( int index); - // optional uint64 proc_id = 3; /** * optional uint64 proc_id = 3; */ @@ -6296,7 +6486,6 @@ public final class ProcedureProtos { */ long getProcId(); - // repeated uint64 child_id = 4; /** * repeated uint64 child_id = 4; */ @@ -6313,36 +6502,31 @@ public final class ProcedureProtos { /** * Protobuf type {@code hbase.pb.ProcedureWALEntry} */ - public static final class ProcedureWALEntry extends - com.google.protobuf.GeneratedMessage - implements ProcedureWALEntryOrBuilder { + public static final class ProcedureWALEntry extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ProcedureWALEntry) + ProcedureWALEntryOrBuilder { // Use ProcedureWALEntry.newBuilder() to construct. 
- private ProcedureWALEntry(com.google.protobuf.GeneratedMessage.Builder builder) { + private ProcedureWALEntry(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ProcedureWALEntry(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ProcedureWALEntry defaultInstance; - public static ProcedureWALEntry getDefaultInstance() { - return defaultInstance; } - - public ProcedureWALEntry getDefaultInstanceForType() { - return defaultInstance; + private ProcedureWALEntry() { + type_ = 1; + procedure_ = java.util.Collections.emptyList(); + procId_ = 0L; + childId_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ProcedureWALEntry( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -6368,7 +6552,7 @@ public final class ProcedureProtos { unknownFields.mergeVarintField(1, rawValue); } else { bitField0_ |= 0x00000001; - type_ = value; + type_ = rawValue; } break; } @@ -6377,7 +6561,8 @@ public final class ProcedureProtos { procedure_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } - procedure_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.PARSER, extensionRegistry)); + procedure_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.PARSER, extensionRegistry)); break; } case 24: { @@ -6412,7 +6597,7 @@ public final class 
ProcedureProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { procedure_ = java.util.Collections.unmodifiableList(procedure_); @@ -6429,28 +6614,13 @@ public final class ProcedureProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALEntry_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALEntry_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ProcedureWALEntry parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ProcedureWALEntry(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - /** * Protobuf enum {@code hbase.pb.ProcedureWALEntry.Type} */ @@ -6459,27 +6629,27 @@ public final class ProcedureProtos { /** * PROCEDURE_WAL_EOF = 1; */ - PROCEDURE_WAL_EOF(0, 1), + PROCEDURE_WAL_EOF(1), /** * PROCEDURE_WAL_INIT = 2; */ - PROCEDURE_WAL_INIT(1, 2), + PROCEDURE_WAL_INIT(2), /** * PROCEDURE_WAL_INSERT = 3; */ - PROCEDURE_WAL_INSERT(2, 3), + PROCEDURE_WAL_INSERT(3), /** * PROCEDURE_WAL_UPDATE = 4; */ - 
PROCEDURE_WAL_UPDATE(3, 4), + PROCEDURE_WAL_UPDATE(4), /** * PROCEDURE_WAL_DELETE = 5; */ - PROCEDURE_WAL_DELETE(4, 5), + PROCEDURE_WAL_DELETE(5), /** * PROCEDURE_WAL_COMPACT = 6; */ - PROCEDURE_WAL_COMPACT(5, 6), + PROCEDURE_WAL_COMPACT(6), ; /** @@ -6508,9 +6678,19 @@ public final class ProcedureProtos { public static final int PROCEDURE_WAL_COMPACT_VALUE = 6; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static Type valueOf(int value) { + return forNumber(value); + } + + public static Type forNumber(int value) { switch (value) { case 1: return PROCEDURE_WAL_EOF; case 2: return PROCEDURE_WAL_INIT; @@ -6526,17 +6706,17 @@ public final class ProcedureProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + Type> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public Type findValueByNumber(int number) { - return Type.valueOf(number); + return Type.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -6558,11 +6738,9 @@ public final class ProcedureProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private Type(int index, int value) { - this.index = index; + private Type(int value) { this.value = value; } @@ -6570,9 +6748,8 @@ public final class ProcedureProtos { } private int bitField0_; - // required .hbase.pb.ProcedureWALEntry.Type type = 1; public static final int TYPE_FIELD_NUMBER = 1; - private 
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type type_; + private int type_; /** * required .hbase.pb.ProcedureWALEntry.Type type = 1; */ @@ -6583,10 +6760,10 @@ public final class ProcedureProtos { * required .hbase.pb.ProcedureWALEntry.Type type = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type getType() { - return type_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type result = org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type.valueOf(type_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type.PROCEDURE_WAL_EOF : result; } - // repeated .hbase.pb.Procedure procedure = 2; public static final int PROCEDURE_FIELD_NUMBER = 2; private java.util.List procedure_; /** @@ -6622,7 +6799,6 @@ public final class ProcedureProtos { return procedure_.get(index); } - // optional uint64 proc_id = 3; public static final int PROC_ID_FIELD_NUMBER = 3; private long procId_; /** @@ -6638,7 +6814,6 @@ public final class ProcedureProtos { return procId_; } - // repeated uint64 child_id = 4; public static final int CHILD_ID_FIELD_NUMBER = 4; private java.util.List childId_; /** @@ -6661,16 +6836,11 @@ public final class ProcedureProtos { return childId_.get(index); } - private void initFields() { - type_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type.PROCEDURE_WAL_EOF; - procedure_ = java.util.Collections.emptyList(); - procId_ = 0L; - childId_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasType()) { memoizedIsInitialized = 0; @@ -6688,9 +6858,8 @@ public 
final class ProcedureProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeEnum(1, type_.getNumber()); + output.writeEnum(1, type_); } for (int i = 0; i < procedure_.size(); i++) { output.writeMessage(2, procedure_.get(i)); @@ -6701,18 +6870,17 @@ public final class ProcedureProtos { for (int i = 0; i < childId_.size(); i++) { output.writeUInt64(4, childId_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(1, type_.getNumber()); + .computeEnumSize(1, type_); } for (int i = 0; i < procedure_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -6731,19 +6899,13 @@ public final class ProcedureProtos { size += dataSize; size += 1 * getChildIdList().size(); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -6756,8 +6918,7 @@ public final class ProcedureProtos { boolean result = true; result = result && (hasType() == other.hasType()); if (hasType()) { - result = result && - (getType() == other.getType()); + result = result && type_ == other.type_; } result = result && getProcedureList() .equals(other.getProcedureList()); @@ -6768,12 +6929,10 @@ public final class ProcedureProtos { } result = 
result && getChildIdList() .equals(other.getChildIdList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -6783,7 +6942,7 @@ public final class ProcedureProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasType()) { hash = (37 * hash) + TYPE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getType()); + hash = (53 * hash) + type_; } if (getProcedureCount() > 0) { hash = (37 * hash) + PROCEDURE_FIELD_NUMBER; @@ -6791,13 +6950,14 @@ public final class ProcedureProtos { } if (hasProcId()) { hash = (37 * hash) + PROC_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getProcId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getProcId()); } if (getChildIdCount() > 0) { hash = (37 * hash) + CHILD_ID_FIELD_NUMBER; hash = (53 * hash) + getChildIdList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -6825,46 +6985,57 @@ public final class ProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -6872,14 +7043,15 @@ public final class ProcedureProtos { * Protobuf type {@code hbase.pb.ProcedureWALEntry} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntryOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ProcedureWALEntry) + org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntryOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALEntry_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALEntry_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -6892,22 +7064,19 @@ public final class ProcedureProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getProcedureFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - 
public Builder clear() { super.clear(); - type_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type.PROCEDURE_WAL_EOF; + type_ = 1; bitField0_ = (bitField0_ & ~0x00000001); if (procedureBuilder_ == null) { procedure_ = java.util.Collections.emptyList(); @@ -6922,10 +7091,6 @@ public final class ProcedureProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.internal_static_hbase_pb_ProcedureWALEntry_descriptor; @@ -6974,6 +7139,32 @@ public final class ProcedureProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry)other); @@ -7007,7 +7198,7 @@ public final class ProcedureProtos { procedure_ = other.procedure_; bitField0_ = (bitField0_ & ~0x00000002); 
procedureBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getProcedureFieldBuilder() : null; } else { procedureBuilder_.addAllMessages(other.procedure_); @@ -7027,18 +7218,17 @@ public final class ProcedureProtos { } onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasType()) { - return false; } for (int i = 0; i < getProcedureCount(); i++) { if (!getProcedure(i).isInitialized()) { - return false; } } @@ -7054,7 +7244,7 @@ public final class ProcedureProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -7064,8 +7254,7 @@ public final class ProcedureProtos { } private int bitField0_; - // required .hbase.pb.ProcedureWALEntry.Type type = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type type_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type.PROCEDURE_WAL_EOF; + private int type_ = 1; /** * required .hbase.pb.ProcedureWALEntry.Type type = 1; */ @@ -7076,7 +7265,8 @@ public final class ProcedureProtos { * required .hbase.pb.ProcedureWALEntry.Type type = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type getType() { - return type_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type result = org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type.valueOf(type_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type.PROCEDURE_WAL_EOF : result; } /** * required .hbase.pb.ProcedureWALEntry.Type type = 1; @@ -7086,7 +7276,7 @@ public final class ProcedureProtos { throw new NullPointerException(); } bitField0_ |= 0x00000001; - type_ = value; + type_ = value.getNumber(); onChanged(); return this; } @@ -7095,12 +7285,11 @@ public final class ProcedureProtos { */ public Builder clearType() { bitField0_ = (bitField0_ & ~0x00000001); - type_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type.PROCEDURE_WAL_EOF; + type_ = 1; onChanged(); return this; } - // repeated .hbase.pb.Procedure procedure = 2; private java.util.List procedure_ = java.util.Collections.emptyList(); private void ensureProcedureIsMutable() { @@ -7110,7 +7299,7 @@ public final class ProcedureProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder> procedureBuilder_; /** @@ -7242,7 +7431,8 @@ public final class ProcedureProtos { java.lang.Iterable values) { if (procedureBuilder_ == null) { ensureProcedureIsMutable(); - super.addAll(values, procedure_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, procedure_); onChanged(); } else { procedureBuilder_.addAllMessages(values); @@ -7325,11 +7515,11 @@ public final class ProcedureProtos { getProcedureBuilderList() { return getProcedureFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure, 
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder> getProcedureFieldBuilder() { if (procedureBuilder_ == null) { - procedureBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + procedureBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>( procedure_, ((bitField0_ & 0x00000002) == 0x00000002), @@ -7340,7 +7530,6 @@ public final class ProcedureProtos { return procedureBuilder_; } - // optional uint64 proc_id = 3; private long procId_ ; /** * optional uint64 proc_id = 3; @@ -7373,7 +7562,6 @@ public final class ProcedureProtos { return this; } - // repeated uint64 child_id = 4; private java.util.List childId_ = java.util.Collections.emptyList(); private void ensureChildIdIsMutable() { if (!((bitField0_ & 0x00000008) == 0x00000008)) { @@ -7425,7 +7613,8 @@ public final class ProcedureProtos { public Builder addAllChildId( java.lang.Iterable values) { ensureChildIdIsMutable(); - super.addAll(values, childId_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, childId_); onChanged(); return this; } @@ -7438,64 +7627,101 @@ public final class ProcedureProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ProcedureWALEntry) } + // @@protoc_insertion_point(class_scope:hbase.pb.ProcedureWALEntry) + private static final 
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry DEFAULT_INSTANCE; static { - defaultInstance = new ProcedureWALEntry(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ProcedureWALEntry parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ProcedureWALEntry(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ProcedureWALEntry) } - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_Procedure_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_Procedure_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SequentialProcedureData_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SequentialProcedureData_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_StateMachineProcedureData_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_StateMachineProcedureData_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ProcedureWALHeader_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ProcedureWALHeader_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ProcedureWALTrailer_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ProcedureWALTrailer_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ProcedureStoreTracker_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ProcedureStoreTracker_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ProcedureStoreTracker_TrackerNode_descriptor; - private static - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ProcedureStoreTracker_TrackerNode_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ProcedureWALEntry_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ProcedureWALEntry_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { @@ -7533,66 +7759,67 @@ public final class ProcedureProtos { "neratedB\017ProcedureProtosH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_hbase_pb_Procedure_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_hbase_pb_Procedure_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_Procedure_descriptor, - new java.lang.String[] { "ClassName", "ParentId", "ProcId", "StartTime", "Owner", "State", "StackId", "LastUpdate", "Timeout", "Exception", "Result", "StateData", "NonceGroup", "Nonce", }); - internal_static_hbase_pb_SequentialProcedureData_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_hbase_pb_SequentialProcedureData_fieldAccessorTable = new - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_SequentialProcedureData_descriptor, - new java.lang.String[] { "Executed", }); - internal_static_hbase_pb_StateMachineProcedureData_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_hbase_pb_StateMachineProcedureData_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_StateMachineProcedureData_descriptor, - new java.lang.String[] { "State", }); - internal_static_hbase_pb_ProcedureWALHeader_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_hbase_pb_ProcedureWALHeader_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ProcedureWALHeader_descriptor, - new java.lang.String[] { "Version", "Type", "LogId", "MinProcId", }); - internal_static_hbase_pb_ProcedureWALTrailer_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_hbase_pb_ProcedureWALTrailer_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ProcedureWALTrailer_descriptor, - new java.lang.String[] { "Version", "TrackerPos", }); - internal_static_hbase_pb_ProcedureStoreTracker_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_hbase_pb_ProcedureStoreTracker_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ProcedureStoreTracker_descriptor, - new java.lang.String[] { "Node", }); - internal_static_hbase_pb_ProcedureStoreTracker_TrackerNode_descriptor = - internal_static_hbase_pb_ProcedureStoreTracker_descriptor.getNestedTypes().get(0); - internal_static_hbase_pb_ProcedureStoreTracker_TrackerNode_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ProcedureStoreTracker_TrackerNode_descriptor, - new java.lang.String[] { "StartId", "Updated", 
"Deleted", }); - internal_static_hbase_pb_ProcedureWALEntry_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_hbase_pb_ProcedureWALEntry_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ProcedureWALEntry_descriptor, - new java.lang.String[] { "Type", "Procedure", "ProcId", "ChildId", }); - return null; - } - }; + new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.getDescriptor(), }, assigner); + internal_static_hbase_pb_Procedure_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_hbase_pb_Procedure_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_Procedure_descriptor, + new java.lang.String[] { "ClassName", "ParentId", "ProcId", "StartTime", "Owner", "State", "StackId", "LastUpdate", "Timeout", "Exception", "Result", "StateData", "NonceGroup", "Nonce", }); + internal_static_hbase_pb_SequentialProcedureData_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_hbase_pb_SequentialProcedureData_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_SequentialProcedureData_descriptor, + new java.lang.String[] { "Executed", }); + internal_static_hbase_pb_StateMachineProcedureData_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_hbase_pb_StateMachineProcedureData_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + 
internal_static_hbase_pb_StateMachineProcedureData_descriptor, + new java.lang.String[] { "State", }); + internal_static_hbase_pb_ProcedureWALHeader_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_hbase_pb_ProcedureWALHeader_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ProcedureWALHeader_descriptor, + new java.lang.String[] { "Version", "Type", "LogId", "MinProcId", }); + internal_static_hbase_pb_ProcedureWALTrailer_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_hbase_pb_ProcedureWALTrailer_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ProcedureWALTrailer_descriptor, + new java.lang.String[] { "Version", "TrackerPos", }); + internal_static_hbase_pb_ProcedureStoreTracker_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_hbase_pb_ProcedureStoreTracker_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ProcedureStoreTracker_descriptor, + new java.lang.String[] { "Node", }); + internal_static_hbase_pb_ProcedureStoreTracker_TrackerNode_descriptor = + internal_static_hbase_pb_ProcedureStoreTracker_descriptor.getNestedTypes().get(0); + internal_static_hbase_pb_ProcedureStoreTracker_TrackerNode_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ProcedureStoreTracker_TrackerNode_descriptor, + new java.lang.String[] { "StartId", "Updated", "Deleted", }); + internal_static_hbase_pb_ProcedureWALEntry_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_hbase_pb_ProcedureWALEntry_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ProcedureWALEntry_descriptor, + new java.lang.String[] { "Type", "Procedure", "ProcId", "ChildId", }); + 
org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/QuotaProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/QuotaProtos.java index da4f9d4..d31e338 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/QuotaProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/QuotaProtos.java @@ -6,7 +6,13 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated; public final class QuotaProtos { private QuotaProtos() {} public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); } /** * Protobuf enum {@code hbase.pb.QuotaScope} @@ -16,11 +22,11 @@ public final class QuotaProtos { /** * CLUSTER = 1; */ - CLUSTER(0, 1), + CLUSTER(1), /** * MACHINE = 2; */ - MACHINE(1, 2), + MACHINE(2), ; /** @@ -33,9 +39,19 @@ public final class QuotaProtos { public static final int MACHINE_VALUE = 2; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. 
+ */ + @java.lang.Deprecated public static QuotaScope valueOf(int value) { + return forNumber(value); + } + + public static QuotaScope forNumber(int value) { switch (value) { case 1: return CLUSTER; case 2: return MACHINE; @@ -47,17 +63,17 @@ public final class QuotaProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + QuotaScope> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public QuotaScope findValueByNumber(int number) { - return QuotaScope.valueOf(number); + return QuotaScope.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -79,11 +95,9 @@ public final class QuotaProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private QuotaScope(int index, int value) { - this.index = index; + private QuotaScope(int value) { this.value = value; } @@ -98,27 +112,27 @@ public final class QuotaProtos { /** * REQUEST_NUMBER = 1; */ - REQUEST_NUMBER(0, 1), + REQUEST_NUMBER(1), /** * REQUEST_SIZE = 2; */ - REQUEST_SIZE(1, 2), + REQUEST_SIZE(2), /** * WRITE_NUMBER = 3; */ - WRITE_NUMBER(2, 3), + WRITE_NUMBER(3), /** * WRITE_SIZE = 4; */ - WRITE_SIZE(3, 4), + WRITE_SIZE(4), /** * READ_NUMBER = 5; */ - READ_NUMBER(4, 5), + READ_NUMBER(5), /** * READ_SIZE = 6; */ - READ_SIZE(5, 6), + READ_SIZE(6), ; /** @@ -147,9 +161,19 @@ public final class QuotaProtos { public static final int READ_SIZE_VALUE = 6; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. 
+ */ + @java.lang.Deprecated public static ThrottleType valueOf(int value) { + return forNumber(value); + } + + public static ThrottleType forNumber(int value) { switch (value) { case 1: return REQUEST_NUMBER; case 2: return REQUEST_SIZE; @@ -165,17 +189,17 @@ public final class QuotaProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + ThrottleType> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public ThrottleType findValueByNumber(int number) { - return ThrottleType.valueOf(number); + return ThrottleType.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -197,11 +221,9 @@ public final class QuotaProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private ThrottleType(int index, int value) { - this.index = index; + private ThrottleType(int value) { this.value = value; } @@ -216,7 +238,7 @@ public final class QuotaProtos { /** * THROTTLE = 1; */ - THROTTLE(0, 1), + THROTTLE(1), ; /** @@ -225,9 +247,19 @@ public final class QuotaProtos { public static final int THROTTLE_VALUE = 1; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. 
+ */ + @java.lang.Deprecated public static QuotaType valueOf(int value) { + return forNumber(value); + } + + public static QuotaType forNumber(int value) { switch (value) { case 1: return THROTTLE; default: return null; @@ -238,17 +270,17 @@ public final class QuotaProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + QuotaType> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public QuotaType findValueByNumber(int number) { - return QuotaType.valueOf(number); + return QuotaType.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -270,21 +302,19 @@ public final class QuotaProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private QuotaType(int index, int value) { - this.index = index; + private QuotaType(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.QuotaType) } - public interface TimedQuotaOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface TimedQuotaOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.TimedQuota) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.TimeUnit time_unit = 1; /** * required .hbase.pb.TimeUnit time_unit = 1; */ @@ -294,7 +324,6 @@ public final class QuotaProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeUnit getTimeUnit(); - // optional uint64 soft_limit = 2; /** * optional uint64 soft_limit = 2; */ @@ -304,7 +333,6 @@ public final class QuotaProtos { */ long getSoftLimit(); - // optional float share = 3; /** * optional float share = 3; */ @@ -314,7 
+342,6 @@ public final class QuotaProtos { */ float getShare(); - // optional .hbase.pb.QuotaScope scope = 4 [default = MACHINE]; /** * optional .hbase.pb.QuotaScope scope = 4 [default = MACHINE]; */ @@ -327,36 +354,31 @@ public final class QuotaProtos { /** * Protobuf type {@code hbase.pb.TimedQuota} */ - public static final class TimedQuota extends - com.google.protobuf.GeneratedMessage - implements TimedQuotaOrBuilder { + public static final class TimedQuota extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.TimedQuota) + TimedQuotaOrBuilder { // Use TimedQuota.newBuilder() to construct. - private TimedQuota(com.google.protobuf.GeneratedMessage.Builder builder) { + private TimedQuota(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private TimedQuota(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final TimedQuota defaultInstance; - public static TimedQuota getDefaultInstance() { - return defaultInstance; - } - - public TimedQuota getDefaultInstanceForType() { - return defaultInstance; + private TimedQuota() { + timeUnit_ = 1; + softLimit_ = 0L; + share_ = 0F; + scope_ = 2; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private TimedQuota( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -382,7 +404,7 @@ public final class QuotaProtos { unknownFields.mergeVarintField(1, rawValue); } else { bitField0_ |= 
0x00000001; - timeUnit_ = value; + timeUnit_ = rawValue; } break; } @@ -403,7 +425,7 @@ public final class QuotaProtos { unknownFields.mergeVarintField(4, rawValue); } else { bitField0_ |= 0x00000008; - scope_ = value; + scope_ = rawValue; } break; } @@ -413,7 +435,7 @@ public final class QuotaProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -424,32 +446,16 @@ public final class QuotaProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_TimedQuota_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_TimedQuota_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public TimedQuota parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new TimedQuota(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.TimeUnit time_unit = 1; public static final int TIME_UNIT_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeUnit timeUnit_; + private int timeUnit_; /** * required 
.hbase.pb.TimeUnit time_unit = 1; */ @@ -460,10 +466,10 @@ public final class QuotaProtos { * required .hbase.pb.TimeUnit time_unit = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeUnit getTimeUnit() { - return timeUnit_; + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeUnit result = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeUnit.valueOf(timeUnit_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeUnit.NANOSECONDS : result; } - // optional uint64 soft_limit = 2; public static final int SOFT_LIMIT_FIELD_NUMBER = 2; private long softLimit_; /** @@ -479,7 +485,6 @@ public final class QuotaProtos { return softLimit_; } - // optional float share = 3; public static final int SHARE_FIELD_NUMBER = 3; private float share_; /** @@ -495,9 +500,8 @@ public final class QuotaProtos { return share_; } - // optional .hbase.pb.QuotaScope scope = 4 [default = MACHINE]; public static final int SCOPE_FIELD_NUMBER = 4; - private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaScope scope_; + private int scope_; /** * optional .hbase.pb.QuotaScope scope = 4 [default = MACHINE]; */ @@ -508,19 +512,15 @@ public final class QuotaProtos { * optional .hbase.pb.QuotaScope scope = 4 [default = MACHINE]; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaScope getScope() { - return scope_; + org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaScope result = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaScope.valueOf(scope_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaScope.MACHINE : result; } - private void initFields() { - timeUnit_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeUnit.NANOSECONDS; - softLimit_ = 0L; - share_ = 0F; - scope_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaScope.MACHINE; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasTimeUnit()) { memoizedIsInitialized = 0; @@ -532,9 +532,8 @@ public final class QuotaProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeEnum(1, timeUnit_.getNumber()); + output.writeEnum(1, timeUnit_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt64(2, softLimit_); @@ -543,20 +542,19 @@ public final class QuotaProtos { output.writeFloat(3, share_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeEnum(4, scope_.getNumber()); + output.writeEnum(4, scope_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(1, timeUnit_.getNumber()); + .computeEnumSize(1, timeUnit_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream @@ -568,21 +566,15 @@ public final class QuotaProtos { } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(4, scope_.getNumber()); + .computeEnumSize(4, 
scope_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -595,8 +587,7 @@ public final class QuotaProtos { boolean result = true; result = result && (hasTimeUnit() == other.hasTimeUnit()); if (hasTimeUnit()) { - result = result && - (getTimeUnit() == other.getTimeUnit()); + result = result && timeUnit_ == other.timeUnit_; } result = result && (hasSoftLimit() == other.hasSoftLimit()); if (hasSoftLimit()) { @@ -605,19 +596,19 @@ public final class QuotaProtos { } result = result && (hasShare() == other.hasShare()); if (hasShare()) { - result = result && (Float.floatToIntBits(getShare()) == Float.floatToIntBits(other.getShare())); + result = result && ( + java.lang.Float.floatToIntBits(getShare()) + == java.lang.Float.floatToIntBits( + other.getShare())); } result = result && (hasScope() == other.hasScope()); if (hasScope()) { - result = result && - (getScope() == other.getScope()); + result = result && scope_ == other.scope_; } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -627,22 +618,23 @@ public final class QuotaProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTimeUnit()) { hash = (37 * hash) + TIME_UNIT_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getTimeUnit()); + hash = (53 * hash) + timeUnit_; } if (hasSoftLimit()) { hash = (37 * hash) + SOFT_LIMIT_FIELD_NUMBER; - hash = (53 * hash) + 
hashLong(getSoftLimit()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getSoftLimit()); } if (hasShare()) { hash = (37 * hash) + SHARE_FIELD_NUMBER; - hash = (53 * hash) + Float.floatToIntBits( + hash = (53 * hash) + java.lang.Float.floatToIntBits( getShare()); } if (hasScope()) { hash = (37 * hash) + SCOPE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getScope()); + hash = (53 * hash) + scope_; } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -670,46 +662,57 @@ public final class QuotaProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -717,14 +720,15 @@ public final class QuotaProtos { * Protobuf type {@code hbase.pb.TimedQuota} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.TimedQuota) + org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_TimedQuota_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_TimedQuota_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -737,35 +741,28 @@ public final class QuotaProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); - timeUnit_ = 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeUnit.NANOSECONDS; + timeUnit_ = 1; bitField0_ = (bitField0_ & ~0x00000001); softLimit_ = 0L; bitField0_ = (bitField0_ & ~0x00000002); share_ = 0F; bitField0_ = (bitField0_ & ~0x00000004); - scope_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaScope.MACHINE; + scope_ = 2; bitField0_ = (bitField0_ & ~0x00000008); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_TimedQuota_descriptor; @@ -808,6 +805,32 @@ public final class QuotaProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota)other); @@ -831,13 +854,13 @@ public final class QuotaProtos { if (other.hasScope()) { setScope(other.getScope()); } - 
this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasTimeUnit()) { - return false; } return true; @@ -852,7 +875,7 @@ public final class QuotaProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -862,8 +885,7 @@ public final class QuotaProtos { } private int bitField0_; - // required .hbase.pb.TimeUnit time_unit = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeUnit timeUnit_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeUnit.NANOSECONDS; + private int timeUnit_ = 1; /** * required .hbase.pb.TimeUnit time_unit = 1; */ @@ -874,7 +896,8 @@ public final class QuotaProtos { * required .hbase.pb.TimeUnit time_unit = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeUnit getTimeUnit() { - return timeUnit_; + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeUnit result = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeUnit.valueOf(timeUnit_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeUnit.NANOSECONDS : result; } /** * required .hbase.pb.TimeUnit time_unit = 1; @@ -884,7 +907,7 @@ public final class QuotaProtos { throw new NullPointerException(); } bitField0_ |= 0x00000001; - timeUnit_ = value; + timeUnit_ = value.getNumber(); onChanged(); return this; } @@ -893,12 +916,11 @@ public final class QuotaProtos { */ public Builder clearTimeUnit() { bitField0_ = (bitField0_ & ~0x00000001); - timeUnit_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeUnit.NANOSECONDS; + timeUnit_ = 1; onChanged(); return this; } - // optional uint64 soft_limit = 2; private long softLimit_ ; /** * optional uint64 soft_limit = 2; @@ -931,7 +953,6 @@ public final class QuotaProtos { return this; } - // optional float share = 3; private float share_ ; /** * optional float share = 3; @@ -964,8 +985,7 @@ public final class QuotaProtos { return this; } - // optional .hbase.pb.QuotaScope scope = 4 [default = MACHINE]; - private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaScope scope_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaScope.MACHINE; + private int scope_ = 2; /** * optional .hbase.pb.QuotaScope scope = 4 [default = MACHINE]; */ @@ -976,7 +996,8 @@ public final class QuotaProtos { * optional .hbase.pb.QuotaScope scope = 4 [default = MACHINE]; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaScope getScope() { - return scope_; + org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaScope result = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaScope.valueOf(scope_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaScope.MACHINE : result; } /** * optional .hbase.pb.QuotaScope scope = 4 [default = MACHINE]; @@ -986,7 +1007,7 @@ public final class QuotaProtos { throw new NullPointerException(); } bitField0_ |= 0x00000008; - scope_ = value; + scope_ = value.getNumber(); onChanged(); return this; } @@ -995,26 +1016,63 @@ public final class QuotaProtos { */ public Builder clearScope() { bitField0_ = (bitField0_ & ~0x00000008); - scope_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaScope.MACHINE; + scope_ = 2; onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.TimedQuota) } + // @@protoc_insertion_point(class_scope:hbase.pb.TimedQuota) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota DEFAULT_INSTANCE; static { - defaultInstance = new TimedQuota(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public TimedQuota parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TimedQuota(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + 
@java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.TimedQuota) } - public interface ThrottleOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ThrottleOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.Throttle) + com.google.protobuf.MessageOrBuilder { - // optional .hbase.pb.TimedQuota req_num = 1; /** * optional .hbase.pb.TimedQuota req_num = 1; */ @@ -1028,7 +1086,6 @@ public final class QuotaProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReqNumOrBuilder(); - // optional .hbase.pb.TimedQuota req_size = 2; /** * optional .hbase.pb.TimedQuota req_size = 2; */ @@ -1042,7 +1099,6 @@ public final class QuotaProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReqSizeOrBuilder(); - // optional .hbase.pb.TimedQuota write_num = 3; /** * optional .hbase.pb.TimedQuota write_num = 3; */ @@ -1056,7 +1112,6 @@ public final class QuotaProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getWriteNumOrBuilder(); - // optional .hbase.pb.TimedQuota write_size = 4; /** * optional .hbase.pb.TimedQuota write_size = 4; */ @@ -1070,7 +1125,6 @@ public final class QuotaProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getWriteSizeOrBuilder(); - // optional .hbase.pb.TimedQuota read_num = 5; /** * optional .hbase.pb.TimedQuota read_num = 5; */ @@ -1084,7 +1138,6 @@ public final class QuotaProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReadNumOrBuilder(); - // optional .hbase.pb.TimedQuota read_size = 6; /** * optional .hbase.pb.TimedQuota read_size = 6; */ @@ -1101,36 
+1154,27 @@ public final class QuotaProtos { /** * Protobuf type {@code hbase.pb.Throttle} */ - public static final class Throttle extends - com.google.protobuf.GeneratedMessage - implements ThrottleOrBuilder { + public static final class Throttle extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.Throttle) + ThrottleOrBuilder { // Use Throttle.newBuilder() to construct. - private Throttle(com.google.protobuf.GeneratedMessage.Builder builder) { + private Throttle(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private Throttle(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final Throttle defaultInstance; - public static Throttle getDefaultInstance() { - return defaultInstance; - } - - public Throttle getDefaultInstanceForType() { - return defaultInstance; + private Throttle() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private Throttle( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -1233,7 +1277,7 @@ public final class QuotaProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -1244,30 +1288,14 @@ public final class QuotaProtos { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_Throttle_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_Throttle_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public Throttle parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new Throttle(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional .hbase.pb.TimedQuota req_num = 1; public static final int REQ_NUM_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota reqNum_; /** @@ -1280,16 +1308,15 @@ public final class QuotaProtos { * optional .hbase.pb.TimedQuota req_num = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getReqNum() { - return reqNum_; + return reqNum_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : reqNum_; } /** * optional .hbase.pb.TimedQuota req_num = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReqNumOrBuilder() { - return reqNum_; + return reqNum_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : reqNum_; } - // optional .hbase.pb.TimedQuota req_size = 2; public static final int REQ_SIZE_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota reqSize_; /** @@ -1302,16 +1329,15 @@ public final class QuotaProtos { * optional .hbase.pb.TimedQuota req_size = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getReqSize() { - return reqSize_; + return reqSize_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : reqSize_; } /** * optional .hbase.pb.TimedQuota req_size = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReqSizeOrBuilder() { - return reqSize_; + return reqSize_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : reqSize_; } - // optional .hbase.pb.TimedQuota write_num = 3; public static final int WRITE_NUM_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota writeNum_; /** @@ -1324,16 +1350,15 @@ public final class QuotaProtos { * optional .hbase.pb.TimedQuota write_num = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getWriteNum() { - return writeNum_; + return writeNum_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : writeNum_; } /** * optional .hbase.pb.TimedQuota write_num = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getWriteNumOrBuilder() { - return writeNum_; + return writeNum_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : writeNum_; } - // optional .hbase.pb.TimedQuota write_size = 4; public static final int WRITE_SIZE_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota writeSize_; /** @@ -1346,16 +1371,15 @@ public final class QuotaProtos { * optional .hbase.pb.TimedQuota write_size = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getWriteSize() { - return writeSize_; + return writeSize_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : writeSize_; } /** * optional .hbase.pb.TimedQuota write_size = 4; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getWriteSizeOrBuilder() { - return writeSize_; + return writeSize_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : writeSize_; } - // optional .hbase.pb.TimedQuota read_num = 5; public static final int READ_NUM_FIELD_NUMBER = 5; private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota readNum_; /** @@ -1368,16 +1392,15 @@ public final class QuotaProtos { * optional .hbase.pb.TimedQuota read_num = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getReadNum() { - return readNum_; + return readNum_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : readNum_; } /** * optional .hbase.pb.TimedQuota read_num = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReadNumOrBuilder() { - return readNum_; + return readNum_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : readNum_; } - // optional .hbase.pb.TimedQuota read_size = 6; public static final int READ_SIZE_FIELD_NUMBER = 6; private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota readSize_; /** @@ -1390,27 +1413,20 @@ public final class QuotaProtos { * optional .hbase.pb.TimedQuota read_size = 6; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getReadSize() { - return readSize_; + return readSize_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : readSize_; } /** * optional .hbase.pb.TimedQuota read_size = 6; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReadSizeOrBuilder() { - return readSize_; + return readSize_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : readSize_; } - private void initFields() { - reqNum_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance(); - reqSize_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance(); - writeNum_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance(); - writeSize_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance(); - readNum_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance(); - readSize_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (hasReqNum()) { if (!getReqNum().isInitialized()) { @@ 
-1454,71 +1470,63 @@ public final class QuotaProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, reqNum_); + output.writeMessage(1, getReqNum()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, reqSize_); + output.writeMessage(2, getReqSize()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeMessage(3, writeNum_); + output.writeMessage(3, getWriteNum()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeMessage(4, writeSize_); + output.writeMessage(4, getWriteSize()); } if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeMessage(5, readNum_); + output.writeMessage(5, getReadNum()); } if (((bitField0_ & 0x00000020) == 0x00000020)) { - output.writeMessage(6, readSize_); + output.writeMessage(6, getReadSize()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, reqNum_); + .computeMessageSize(1, getReqNum()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, reqSize_); + .computeMessageSize(2, getReqSize()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, writeNum_); + .computeMessageSize(3, getWriteNum()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(4, writeSize_); + .computeMessageSize(4, getWriteSize()); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream - 
.computeMessageSize(5, readNum_); + .computeMessageSize(5, getReadNum()); } if (((bitField0_ & 0x00000020) == 0x00000020)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(6, readSize_); + .computeMessageSize(6, getReadSize()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -1559,12 +1567,10 @@ public final class QuotaProtos { result = result && getReadSize() .equals(other.getReadSize()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -1596,7 +1602,7 @@ public final class QuotaProtos { hash = (37 * hash) + READ_SIZE_FIELD_NUMBER; hash = (53 * hash) + getReadSize().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -1624,46 +1630,57 @@ public final class QuotaProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + 
return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -1671,14 +1688,15 @@ public final class QuotaProtos { * Protobuf type {@code hbase.pb.Throttle} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.Throttle) + org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_Throttle_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_Throttle_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -1691,12 +1709,13 @@ public final class QuotaProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getReqNumFieldBuilder(); getReqSizeFieldBuilder(); getWriteNumFieldBuilder(); @@ -1705,44 +1724,40 @@ public final class QuotaProtos { getReadSizeFieldBuilder(); } } - private 
static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (reqNumBuilder_ == null) { - reqNum_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance(); + reqNum_ = null; } else { reqNumBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (reqSizeBuilder_ == null) { - reqSize_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance(); + reqSize_ = null; } else { reqSizeBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); if (writeNumBuilder_ == null) { - writeNum_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance(); + writeNum_ = null; } else { writeNumBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); if (writeSizeBuilder_ == null) { - writeSize_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance(); + writeSize_ = null; } else { writeSizeBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000008); if (readNumBuilder_ == null) { - readNum_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance(); + readNum_ = null; } else { readNumBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000010); if (readSizeBuilder_ == null) { - readSize_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance(); + readSize_ = null; } else { readSizeBuilder_.clear(); } @@ -1750,10 +1765,6 @@ public final class QuotaProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_Throttle_descriptor; @@ -1828,6 +1839,32 @@ public final class QuotaProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + 
com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle)other); @@ -1857,44 +1894,39 @@ public final class QuotaProtos { if (other.hasReadSize()) { mergeReadSize(other.getReadSize()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (hasReqNum()) { if (!getReqNum().isInitialized()) { - return false; } } if (hasReqSize()) { if (!getReqSize().isInitialized()) { - return false; } } if (hasWriteNum()) { if (!getWriteNum().isInitialized()) { - return false; } } if (hasWriteSize()) { if (!getWriteSize().isInitialized()) { - return false; } } if (hasReadNum()) { if (!getReadNum().isInitialized()) { - return false; } } if (hasReadSize()) { if (!getReadSize().isInitialized()) { - return false; } } @@ -1910,7 +1942,7 @@ public final class QuotaProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = 
(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -1920,9 +1952,8 @@ public final class QuotaProtos { } private int bitField0_; - // optional .hbase.pb.TimedQuota req_num = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota reqNum_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota reqNum_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> reqNumBuilder_; /** * optional .hbase.pb.TimedQuota req_num = 1; @@ -1935,7 +1966,7 @@ public final class QuotaProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getReqNum() { if (reqNumBuilder_ == null) { - return reqNum_; + return reqNum_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : reqNum_; } else { return reqNumBuilder_.getMessage(); } @@ -1976,6 +2007,7 @@ public final class QuotaProtos { public Builder mergeReqNum(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota value) { if (reqNumBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + reqNum_ != null && reqNum_ != org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) { reqNum_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.newBuilder(reqNum_).mergeFrom(value).buildPartial(); @@ -1994,7 +2026,7 @@ public final class QuotaProtos { */ public Builder clearReqNum() { if (reqNumBuilder_ == null) { - reqNum_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance(); + reqNum_ = null; onChanged(); } else { reqNumBuilder_.clear(); @@ -2017,19 +2049,20 @@ public final class QuotaProtos { if (reqNumBuilder_ != null) { return reqNumBuilder_.getMessageOrBuilder(); } else { - return reqNum_; + return reqNum_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : reqNum_; } } /** * optional .hbase.pb.TimedQuota req_num = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> getReqNumFieldBuilder() { if (reqNumBuilder_ == null) { - reqNumBuilder_ = new com.google.protobuf.SingleFieldBuilder< + reqNumBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>( - reqNum_, + getReqNum(), getParentForChildren(), isClean()); reqNum_ = null; @@ -2037,9 +2070,8 @@ public final class QuotaProtos { return reqNumBuilder_; } - // optional .hbase.pb.TimedQuota req_size = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota reqSize_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota reqSize_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> reqSizeBuilder_; /** * optional .hbase.pb.TimedQuota req_size = 2; @@ -2052,7 +2084,7 @@ public final class QuotaProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getReqSize() { if 
(reqSizeBuilder_ == null) { - return reqSize_; + return reqSize_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : reqSize_; } else { return reqSizeBuilder_.getMessage(); } @@ -2093,6 +2125,7 @@ public final class QuotaProtos { public Builder mergeReqSize(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota value) { if (reqSizeBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + reqSize_ != null && reqSize_ != org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) { reqSize_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.newBuilder(reqSize_).mergeFrom(value).buildPartial(); @@ -2111,7 +2144,7 @@ public final class QuotaProtos { */ public Builder clearReqSize() { if (reqSizeBuilder_ == null) { - reqSize_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance(); + reqSize_ = null; onChanged(); } else { reqSizeBuilder_.clear(); @@ -2134,19 +2167,20 @@ public final class QuotaProtos { if (reqSizeBuilder_ != null) { return reqSizeBuilder_.getMessageOrBuilder(); } else { - return reqSize_; + return reqSize_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : reqSize_; } } /** * optional .hbase.pb.TimedQuota req_size = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> getReqSizeFieldBuilder() { if (reqSizeBuilder_ == null) { - reqSizeBuilder_ = new com.google.protobuf.SingleFieldBuilder< + reqSizeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>( - reqSize_, + getReqSize(), getParentForChildren(), isClean()); reqSize_ = null; @@ -2154,9 +2188,8 @@ public final class QuotaProtos { return reqSizeBuilder_; } - // optional .hbase.pb.TimedQuota write_num = 3; - private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota writeNum_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota writeNum_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> writeNumBuilder_; /** * optional .hbase.pb.TimedQuota write_num = 3; @@ -2169,7 +2202,7 @@ public final class QuotaProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getWriteNum() 
{ if (writeNumBuilder_ == null) { - return writeNum_; + return writeNum_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : writeNum_; } else { return writeNumBuilder_.getMessage(); } @@ -2210,6 +2243,7 @@ public final class QuotaProtos { public Builder mergeWriteNum(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota value) { if (writeNumBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004) && + writeNum_ != null && writeNum_ != org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) { writeNum_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.newBuilder(writeNum_).mergeFrom(value).buildPartial(); @@ -2228,7 +2262,7 @@ public final class QuotaProtos { */ public Builder clearWriteNum() { if (writeNumBuilder_ == null) { - writeNum_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance(); + writeNum_ = null; onChanged(); } else { writeNumBuilder_.clear(); @@ -2251,19 +2285,20 @@ public final class QuotaProtos { if (writeNumBuilder_ != null) { return writeNumBuilder_.getMessageOrBuilder(); } else { - return writeNum_; + return writeNum_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : writeNum_; } } /** * optional .hbase.pb.TimedQuota write_num = 3; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> getWriteNumFieldBuilder() { if (writeNumBuilder_ == null) { - writeNumBuilder_ = new com.google.protobuf.SingleFieldBuilder< + writeNumBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>( - writeNum_, + getWriteNum(), getParentForChildren(), isClean()); writeNum_ = null; @@ -2271,9 +2306,8 @@ public final class QuotaProtos { return writeNumBuilder_; } - // optional .hbase.pb.TimedQuota write_size = 4; - private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota writeSize_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota writeSize_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> writeSizeBuilder_; /** * optional .hbase.pb.TimedQuota write_size = 4; @@ -2286,7 +2320,7 @@ public final class QuotaProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota 
getWriteSize() { if (writeSizeBuilder_ == null) { - return writeSize_; + return writeSize_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : writeSize_; } else { return writeSizeBuilder_.getMessage(); } @@ -2327,6 +2361,7 @@ public final class QuotaProtos { public Builder mergeWriteSize(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota value) { if (writeSizeBuilder_ == null) { if (((bitField0_ & 0x00000008) == 0x00000008) && + writeSize_ != null && writeSize_ != org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) { writeSize_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.newBuilder(writeSize_).mergeFrom(value).buildPartial(); @@ -2345,7 +2380,7 @@ public final class QuotaProtos { */ public Builder clearWriteSize() { if (writeSizeBuilder_ == null) { - writeSize_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance(); + writeSize_ = null; onChanged(); } else { writeSizeBuilder_.clear(); @@ -2368,19 +2403,20 @@ public final class QuotaProtos { if (writeSizeBuilder_ != null) { return writeSizeBuilder_.getMessageOrBuilder(); } else { - return writeSize_; + return writeSize_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : writeSize_; } } /** * optional .hbase.pb.TimedQuota write_size = 4; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> getWriteSizeFieldBuilder() { if (writeSizeBuilder_ == null) { - writeSizeBuilder_ = new com.google.protobuf.SingleFieldBuilder< + writeSizeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>( - writeSize_, + getWriteSize(), getParentForChildren(), isClean()); writeSize_ = null; @@ -2388,9 +2424,8 @@ public final class QuotaProtos { return writeSizeBuilder_; } - // optional .hbase.pb.TimedQuota read_num = 5; - private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota readNum_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota readNum_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> readNumBuilder_; /** * optional .hbase.pb.TimedQuota read_num = 5; @@ -2403,7 +2438,7 @@ public final class QuotaProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota 
getReadNum() { if (readNumBuilder_ == null) { - return readNum_; + return readNum_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : readNum_; } else { return readNumBuilder_.getMessage(); } @@ -2444,6 +2479,7 @@ public final class QuotaProtos { public Builder mergeReadNum(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota value) { if (readNumBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010) && + readNum_ != null && readNum_ != org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) { readNum_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.newBuilder(readNum_).mergeFrom(value).buildPartial(); @@ -2462,7 +2498,7 @@ public final class QuotaProtos { */ public Builder clearReadNum() { if (readNumBuilder_ == null) { - readNum_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance(); + readNum_ = null; onChanged(); } else { readNumBuilder_.clear(); @@ -2485,19 +2521,20 @@ public final class QuotaProtos { if (readNumBuilder_ != null) { return readNumBuilder_.getMessageOrBuilder(); } else { - return readNum_; + return readNum_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : readNum_; } } /** * optional .hbase.pb.TimedQuota read_num = 5; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> getReadNumFieldBuilder() { if (readNumBuilder_ == null) { - readNumBuilder_ = new com.google.protobuf.SingleFieldBuilder< + readNumBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>( - readNum_, + getReadNum(), getParentForChildren(), isClean()); readNum_ = null; @@ -2505,9 +2542,8 @@ public final class QuotaProtos { return readNumBuilder_; } - // optional .hbase.pb.TimedQuota read_size = 6; - private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota readSize_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota readSize_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> readSizeBuilder_; /** * optional .hbase.pb.TimedQuota read_size = 6; @@ -2520,7 +2556,7 @@ public final class QuotaProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getReadSize() 
{ if (readSizeBuilder_ == null) { - return readSize_; + return readSize_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : readSize_; } else { return readSizeBuilder_.getMessage(); } @@ -2561,6 +2597,7 @@ public final class QuotaProtos { public Builder mergeReadSize(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota value) { if (readSizeBuilder_ == null) { if (((bitField0_ & 0x00000020) == 0x00000020) && + readSize_ != null && readSize_ != org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) { readSize_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.newBuilder(readSize_).mergeFrom(value).buildPartial(); @@ -2579,7 +2616,7 @@ public final class QuotaProtos { */ public Builder clearReadSize() { if (readSizeBuilder_ == null) { - readSize_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance(); + readSize_ = null; onChanged(); } else { readSizeBuilder_.clear(); @@ -2602,41 +2639,79 @@ public final class QuotaProtos { if (readSizeBuilder_ != null) { return readSizeBuilder_.getMessageOrBuilder(); } else { - return readSize_; + return readSize_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : readSize_; } } /** * optional .hbase.pb.TimedQuota read_size = 6; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> getReadSizeFieldBuilder() { if (readSizeBuilder_ == null) { - readSizeBuilder_ = new com.google.protobuf.SingleFieldBuilder< + readSizeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>( - readSize_, + getReadSize(), getParentForChildren(), isClean()); readSize_ = null; } return readSizeBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.Throttle) } + // @@protoc_insertion_point(class_scope:hbase.pb.Throttle) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle DEFAULT_INSTANCE; static { - defaultInstance = new Throttle(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final 
com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public Throttle parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Throttle(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.Throttle) } - public interface ThrottleRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ThrottleRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ThrottleRequest) + com.google.protobuf.MessageOrBuilder { - // optional .hbase.pb.ThrottleType type = 1; /** * optional .hbase.pb.ThrottleType type = 1; */ @@ -2646,7 +2721,6 @@ public final class QuotaProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleType getType(); - // optional .hbase.pb.TimedQuota timed_quota = 2; /** * optional .hbase.pb.TimedQuota timed_quota = 2; */ @@ -2663,36 +2737,28 @@ public final class QuotaProtos { /** * Protobuf type {@code hbase.pb.ThrottleRequest} */ - public static final class ThrottleRequest extends - com.google.protobuf.GeneratedMessage - implements ThrottleRequestOrBuilder { + public static final class ThrottleRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ThrottleRequest) + ThrottleRequestOrBuilder { // Use ThrottleRequest.newBuilder() to construct. 
- private ThrottleRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private ThrottleRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private ThrottleRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ThrottleRequest defaultInstance; - public static ThrottleRequest getDefaultInstance() { - return defaultInstance; + private ThrottleRequest() { + type_ = 1; } - public ThrottleRequest getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ThrottleRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -2718,7 +2784,7 @@ public final class QuotaProtos { unknownFields.mergeVarintField(1, rawValue); } else { bitField0_ |= 0x00000001; - type_ = value; + type_ = rawValue; } break; } @@ -2741,7 +2807,7 @@ public final class QuotaProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -2752,32 +2818,16 @@ public final class QuotaProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_ThrottleRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_ThrottleRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ThrottleRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ThrottleRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional .hbase.pb.ThrottleType type = 1; public static final int TYPE_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleType type_; + private int type_; /** * optional .hbase.pb.ThrottleType type = 1; */ @@ -2788,10 +2838,10 @@ public final class QuotaProtos { * optional .hbase.pb.ThrottleType type = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleType getType() { - return type_; + org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleType result = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleType.valueOf(type_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleType.REQUEST_NUMBER : result; } - // optional .hbase.pb.TimedQuota timed_quota = 2; public static final int TIMED_QUOTA_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota timedQuota_; /** @@ -2804,23 +2854,20 @@ public final class QuotaProtos { * optional .hbase.pb.TimedQuota timed_quota = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getTimedQuota() { - return timedQuota_; + return timedQuota_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : timedQuota_; } /** * optional .hbase.pb.TimedQuota timed_quota = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getTimedQuotaOrBuilder() { - return timedQuota_; + return timedQuota_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : timedQuota_; } - private void initFields() { - type_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleType.REQUEST_NUMBER; - timedQuota_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (hasTimedQuota()) { if (!getTimedQuota().isInitialized()) { @@ -2834,43 +2881,35 @@ public final class QuotaProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeEnum(1, type_.getNumber()); + output.writeEnum(1, type_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, timedQuota_); + output.writeMessage(2, 
getTimedQuota()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(1, type_.getNumber()); + .computeEnumSize(1, type_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, timedQuota_); + .computeMessageSize(2, getTimedQuota()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -2883,20 +2922,17 @@ public final class QuotaProtos { boolean result = true; result = result && (hasType() == other.hasType()); if (hasType()) { - result = result && - (getType() == other.getType()); + result = result && type_ == other.type_; } result = result && (hasTimedQuota() == other.hasTimedQuota()); if (hasTimedQuota()) { result = result && getTimedQuota() .equals(other.getTimedQuota()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -2906,13 +2942,13 @@ public final class QuotaProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasType()) { hash = (37 * hash) + TYPE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getType()); + hash = (53 * hash) + type_; } 
if (hasTimedQuota()) { hash = (37 * hash) + TIMED_QUOTA_FIELD_NUMBER; hash = (53 * hash) + getTimedQuota().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -2940,46 +2976,57 @@ public final class QuotaProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, 
input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -2987,14 +3034,15 @@ public final class QuotaProtos { * Protobuf type {@code hbase.pb.ThrottleRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ThrottleRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_ThrottleRequest_descriptor; } - 
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_ThrottleRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -3007,25 +3055,22 @@ public final class QuotaProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTimedQuotaFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); - type_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleType.REQUEST_NUMBER; + type_ = 1; bitField0_ = (bitField0_ & ~0x00000001); if (timedQuotaBuilder_ == null) { - timedQuota_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance(); + timedQuota_ = null; } else { timedQuotaBuilder_.clear(); } @@ -3033,10 +3078,6 @@ public final class QuotaProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_ThrottleRequest_descriptor; @@ -3075,6 +3116,32 @@ public final class QuotaProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor 
field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest)other); @@ -3092,14 +3159,14 @@ public final class QuotaProtos { if (other.hasTimedQuota()) { mergeTimedQuota(other.getTimedQuota()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (hasTimedQuota()) { if (!getTimedQuota().isInitialized()) { - return false; } } @@ -3115,7 +3182,7 @@ public final class QuotaProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -3125,8 +3192,7 @@ public final class QuotaProtos { } private int bitField0_; - // optional .hbase.pb.ThrottleType type = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleType type_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleType.REQUEST_NUMBER; + private int type_ = 1; /** * optional .hbase.pb.ThrottleType type 
= 1; */ @@ -3137,7 +3203,8 @@ public final class QuotaProtos { * optional .hbase.pb.ThrottleType type = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleType getType() { - return type_; + org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleType result = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleType.valueOf(type_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleType.REQUEST_NUMBER : result; } /** * optional .hbase.pb.ThrottleType type = 1; @@ -3147,7 +3214,7 @@ public final class QuotaProtos { throw new NullPointerException(); } bitField0_ |= 0x00000001; - type_ = value; + type_ = value.getNumber(); onChanged(); return this; } @@ -3156,14 +3223,13 @@ public final class QuotaProtos { */ public Builder clearType() { bitField0_ = (bitField0_ & ~0x00000001); - type_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleType.REQUEST_NUMBER; + type_ = 1; onChanged(); return this; } - // optional .hbase.pb.TimedQuota timed_quota = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota timedQuota_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota timedQuota_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> timedQuotaBuilder_; /** * optional .hbase.pb.TimedQuota timed_quota = 2; @@ -3176,7 +3242,7 @@ public final class QuotaProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota getTimedQuota() { if (timedQuotaBuilder_ == null) { - return 
timedQuota_; + return timedQuota_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : timedQuota_; } else { return timedQuotaBuilder_.getMessage(); } @@ -3217,6 +3283,7 @@ public final class QuotaProtos { public Builder mergeTimedQuota(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota value) { if (timedQuotaBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + timedQuota_ != null && timedQuota_ != org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) { timedQuota_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.newBuilder(timedQuota_).mergeFrom(value).buildPartial(); @@ -3235,7 +3302,7 @@ public final class QuotaProtos { */ public Builder clearTimedQuota() { if (timedQuotaBuilder_ == null) { - timedQuota_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance(); + timedQuota_ = null; onChanged(); } else { timedQuotaBuilder_.clear(); @@ -3258,41 +3325,79 @@ public final class QuotaProtos { if (timedQuotaBuilder_ != null) { return timedQuotaBuilder_.getMessageOrBuilder(); } else { - return timedQuota_; + return timedQuota_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance() : timedQuota_; } } /** * optional .hbase.pb.TimedQuota timed_quota = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> getTimedQuotaFieldBuilder() { if (timedQuotaBuilder_ == null) { - timedQuotaBuilder_ = new com.google.protobuf.SingleFieldBuilder< + timedQuotaBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>( - timedQuota_, + getTimedQuota(), getParentForChildren(), isClean()); timedQuota_ = null; } return timedQuotaBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ThrottleRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.ThrottleRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest DEFAULT_INSTANCE; static { - defaultInstance = new ThrottleRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + 
@java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ThrottleRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ThrottleRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ThrottleRequest) } - public interface QuotasOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface QuotasOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.Quotas) + com.google.protobuf.MessageOrBuilder { - // optional bool bypass_globals = 1 [default = false]; /** * optional bool bypass_globals = 1 [default = false]; */ @@ -3302,7 +3407,6 @@ public final class QuotaProtos { */ boolean getBypassGlobals(); - // optional .hbase.pb.Throttle throttle = 2; /** * optional .hbase.pb.Throttle throttle = 2; */ @@ -3319,36 +3423,28 @@ public final class QuotaProtos { /** * Protobuf type {@code hbase.pb.Quotas} */ - public static final class Quotas extends - com.google.protobuf.GeneratedMessage - implements QuotasOrBuilder { + public static final class Quotas extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.Quotas) + QuotasOrBuilder { // Use Quotas.newBuilder() to construct. 
- private Quotas(com.google.protobuf.GeneratedMessage.Builder builder) { + private Quotas(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private Quotas(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final Quotas defaultInstance; - public static Quotas getDefaultInstance() { - return defaultInstance; - } - - public Quotas getDefaultInstanceForType() { - return defaultInstance; + private Quotas() { + bypassGlobals_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private Quotas( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -3391,7 +3487,7 @@ public final class QuotaProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -3402,30 +3498,14 @@ public final class QuotaProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_Quotas_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_Quotas_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public Quotas parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new Quotas(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional bool bypass_globals = 1 [default = false]; public static final int BYPASS_GLOBALS_FIELD_NUMBER = 1; private boolean bypassGlobals_; /** @@ -3441,7 +3521,6 @@ public final class QuotaProtos { return bypassGlobals_; } - // optional .hbase.pb.Throttle throttle = 2; public static final int THROTTLE_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle throttle_; /** @@ -3454,23 +3533,20 @@ public final class QuotaProtos { * optional .hbase.pb.Throttle throttle = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle getThrottle() { - return throttle_; + return throttle_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.getDefaultInstance() : throttle_; } /** * optional .hbase.pb.Throttle throttle = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleOrBuilder getThrottleOrBuilder() { - return throttle_; + return throttle_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.getDefaultInstance() : throttle_; } - private void initFields() { - bypassGlobals_ = false; - throttle_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (hasThrottle()) { if (!getThrottle().isInitialized()) { @@ -3484,19 +3560,17 @@ public final class QuotaProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, bypassGlobals_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, throttle_); + output.writeMessage(2, getThrottle()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -3506,21 +3580,15 @@ public final class QuotaProtos { } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, throttle_); + .computeMessageSize(2, getThrottle()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -3541,12 +3609,10 @@ public final class QuotaProtos { result = 
result && getThrottle() .equals(other.getThrottle()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -3556,13 +3622,14 @@ public final class QuotaProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasBypassGlobals()) { hash = (37 * hash) + BYPASS_GLOBALS_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getBypassGlobals()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getBypassGlobals()); } if (hasThrottle()) { hash = (37 * hash) + THROTTLE_FIELD_NUMBER; hash = (53 * hash) + getThrottle().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -3590,46 +3657,57 @@ public final class QuotaProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -3637,14 +3715,15 @@ public final class QuotaProtos { * Protobuf type {@code hbase.pb.Quotas} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotasOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.Quotas) + org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotasOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_Quotas_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_Quotas_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -3657,25 +3736,22 @@ public final class QuotaProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getThrottleFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); bypassGlobals_ = false; bitField0_ = (bitField0_ & 
~0x00000001); if (throttleBuilder_ == null) { - throttle_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.getDefaultInstance(); + throttle_ = null; } else { throttleBuilder_.clear(); } @@ -3683,10 +3759,6 @@ public final class QuotaProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_Quotas_descriptor; @@ -3725,6 +3797,32 @@ public final class QuotaProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas)other); @@ -3742,14 +3840,14 @@ public final class QuotaProtos { if (other.hasThrottle()) { mergeThrottle(other.getThrottle()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean 
isInitialized() { if (hasThrottle()) { if (!getThrottle().isInitialized()) { - return false; } } @@ -3765,7 +3863,7 @@ public final class QuotaProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -3775,7 +3873,6 @@ public final class QuotaProtos { } private int bitField0_; - // optional bool bypass_globals = 1 [default = false]; private boolean bypassGlobals_ ; /** * optional bool bypass_globals = 1 [default = false]; @@ -3808,9 +3905,8 @@ public final class QuotaProtos { return this; } - // optional .hbase.pb.Throttle throttle = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle throttle_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle throttle_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleOrBuilder> throttleBuilder_; /** * optional .hbase.pb.Throttle throttle = 2; @@ -3823,7 +3919,7 @@ public final class QuotaProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle getThrottle() { if (throttleBuilder_ == null) { - return throttle_; + return throttle_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.getDefaultInstance() : throttle_; } else { return throttleBuilder_.getMessage(); } @@ -3864,6 +3960,7 @@ public final class QuotaProtos { public Builder mergeThrottle(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle value) { if (throttleBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + throttle_ != null && throttle_ != org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.getDefaultInstance()) { throttle_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.newBuilder(throttle_).mergeFrom(value).buildPartial(); @@ -3882,7 +3979,7 @@ public final class QuotaProtos { */ public Builder clearThrottle() { if (throttleBuilder_ == null) { - throttle_ = org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.getDefaultInstance(); + throttle_ = null; onChanged(); } else { throttleBuilder_.clear(); @@ -3905,73 +4002,103 @@ public final class QuotaProtos { if (throttleBuilder_ != null) { return throttleBuilder_.getMessageOrBuilder(); } else { - return throttle_; + return throttle_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.getDefaultInstance() : throttle_; } } /** * optional .hbase.pb.Throttle throttle = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleOrBuilder> getThrottleFieldBuilder() { if (throttleBuilder_ == null) { - throttleBuilder_ = new com.google.protobuf.SingleFieldBuilder< + throttleBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleOrBuilder>( - throttle_, + getThrottle(), getParentForChildren(), isClean()); throttle_ = null; } return throttleBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.Quotas) } + // @@protoc_insertion_point(class_scope:hbase.pb.Quotas) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas DEFAULT_INSTANCE; static { - defaultInstance = new Quotas(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new 
com.google.protobuf.AbstractParser() { + public Quotas parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Quotas(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.Quotas) } - public interface QuotaUsageOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface QuotaUsageOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.QuotaUsage) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.QuotaUsage} */ - public static final class QuotaUsage extends - com.google.protobuf.GeneratedMessage - implements QuotaUsageOrBuilder { + public static final class QuotaUsage extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.QuotaUsage) + QuotaUsageOrBuilder { // Use QuotaUsage.newBuilder() to construct. 
- private QuotaUsage(com.google.protobuf.GeneratedMessage.Builder builder) { + private QuotaUsage(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private QuotaUsage(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final QuotaUsage defaultInstance; - public static QuotaUsage getDefaultInstance() { - return defaultInstance; - } - - public QuotaUsage getDefaultInstanceForType() { - return defaultInstance; + private QuotaUsage() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private QuotaUsage( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -3995,7 +4122,7 @@ public final class QuotaProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -4006,34 +4133,18 @@ public final class QuotaProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_QuotaUsage_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_QuotaUsage_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage.class, org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public QuotaUsage parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new QuotaUsage(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -4041,29 +4152,21 @@ public final class QuotaProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -4074,12 +4177,10 @@ public final class QuotaProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage other = 
(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -4087,7 +4188,7 @@ public final class QuotaProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -4115,46 +4216,57 @@ public final class QuotaProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + 
.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -4162,14 +4274,15 @@ public final class QuotaProtos { * Protobuf type {@code hbase.pb.QuotaUsage} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsageOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.QuotaUsage) + org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsageOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_QuotaUsage_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_QuotaUsage_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -4182,27 +4295,20 @@ public final class QuotaProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); return this; } - public Builder clone() { - return 
create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.internal_static_hbase_pb_QuotaUsage_descriptor; @@ -4226,6 +4332,32 @@ public final class QuotaProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage)other); @@ -4237,7 +4369,8 @@ public final class QuotaProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -4254,7 +4387,7 @@ public final class QuotaProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -4262,49 +4395,86 @@ public final class QuotaProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.QuotaUsage) } + // @@protoc_insertion_point(class_scope:hbase.pb.QuotaUsage) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage DEFAULT_INSTANCE; static { - defaultInstance = new QuotaUsage(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public QuotaUsage parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new QuotaUsage(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // 
@@protoc_insertion_point(class_scope:hbase.pb.QuotaUsage) } - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_TimedQuota_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_TimedQuota_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_Throttle_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_Throttle_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ThrottleRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ThrottleRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_Quotas_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_Quotas_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_QuotaUsage_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_QuotaUsage_fieldAccessorTable; 
public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { @@ -4333,48 +4503,49 @@ public final class QuotaProtos { "\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_hbase_pb_TimedQuota_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_hbase_pb_TimedQuota_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_TimedQuota_descriptor, - new java.lang.String[] { "TimeUnit", "SoftLimit", "Share", "Scope", }); - internal_static_hbase_pb_Throttle_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_hbase_pb_Throttle_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_Throttle_descriptor, - new java.lang.String[] { "ReqNum", "ReqSize", "WriteNum", "WriteSize", "ReadNum", "ReadSize", }); - internal_static_hbase_pb_ThrottleRequest_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_hbase_pb_ThrottleRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ThrottleRequest_descriptor, - new java.lang.String[] { "Type", "TimedQuota", }); - internal_static_hbase_pb_Quotas_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_hbase_pb_Quotas_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_Quotas_descriptor, - new java.lang.String[] { 
"BypassGlobals", "Throttle", }); - internal_static_hbase_pb_QuotaUsage_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_hbase_pb_QuotaUsage_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_QuotaUsage_descriptor, - new java.lang.String[] { }); - return null; - } - }; + new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(), }, assigner); + internal_static_hbase_pb_TimedQuota_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_hbase_pb_TimedQuota_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_TimedQuota_descriptor, + new java.lang.String[] { "TimeUnit", "SoftLimit", "Share", "Scope", }); + internal_static_hbase_pb_Throttle_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_hbase_pb_Throttle_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_Throttle_descriptor, + new java.lang.String[] { "ReqNum", "ReqSize", "WriteNum", "WriteSize", "ReadNum", "ReadSize", }); + internal_static_hbase_pb_ThrottleRequest_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_hbase_pb_ThrottleRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ThrottleRequest_descriptor, + new java.lang.String[] { "Type", "TimedQuota", }); + internal_static_hbase_pb_Quotas_descriptor = + getDescriptor().getMessageTypes().get(3); + 
internal_static_hbase_pb_Quotas_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_Quotas_descriptor, + new java.lang.String[] { "BypassGlobals", "Throttle", }); + internal_static_hbase_pb_QuotaUsage_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_hbase_pb_QuotaUsage_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_QuotaUsage_descriptor, + new java.lang.String[] { }); + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RPCProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RPCProtos.java index 222f383..ead61ed 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RPCProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RPCProtos.java @@ -6,12 +6,18 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated; public final class RPCProtos { private RPCProtos() {} public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); } - public interface UserInformationOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface UserInformationOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.UserInformation) + com.google.protobuf.MessageOrBuilder { - // required string effective_user = 1; /** * required string effective_user = 1; */ @@ -26,7 +32,6 @@ public final class RPCProtos { com.google.protobuf.ByteString getEffectiveUserBytes(); - // optional string 
real_user = 2; /** * optional string real_user = 2; */ @@ -42,42 +47,35 @@ public final class RPCProtos { getRealUserBytes(); } /** - * Protobuf type {@code hbase.pb.UserInformation} - * *
    * User Information proto.  Included in ConnectionHeader on connection setup
    * 
+ * + * Protobuf type {@code hbase.pb.UserInformation} */ - public static final class UserInformation extends - com.google.protobuf.GeneratedMessage - implements UserInformationOrBuilder { + public static final class UserInformation extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.UserInformation) + UserInformationOrBuilder { // Use UserInformation.newBuilder() to construct. - private UserInformation(com.google.protobuf.GeneratedMessage.Builder builder) { + private UserInformation(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private UserInformation(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final UserInformation defaultInstance; - public static UserInformation getDefaultInstance() { - return defaultInstance; } - - public UserInformation getDefaultInstanceForType() { - return defaultInstance; + private UserInformation() { + effectiveUser_ = ""; + realUser_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private UserInformation( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -97,13 +95,15 @@ public final class RPCProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - effectiveUser_ = input.readBytes(); + effectiveUser_ = bs; break; } case 18: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; - 
realUser_ = input.readBytes(); + realUser_ = bs; break; } } @@ -112,7 +112,7 @@ public final class RPCProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -123,32 +123,16 @@ public final class RPCProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_UserInformation_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_UserInformation_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public UserInformation parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new UserInformation(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string effective_user = 1; public static final int EFFECTIVE_USER_FIELD_NUMBER = 1; - private java.lang.Object effectiveUser_; + private volatile java.lang.Object effectiveUser_; /** * required string effective_user = 1; */ @@ -189,9 +173,8 @@ public final class RPCProtos { } } - // optional string real_user = 2; public static final int REAL_USER_FIELD_NUMBER = 2; - private 
java.lang.Object realUser_; + private volatile java.lang.Object realUser_; /** * optional string real_user = 2; */ @@ -232,14 +215,11 @@ public final class RPCProtos { } } - private void initFields() { - effectiveUser_ = ""; - realUser_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasEffectiveUser()) { memoizedIsInitialized = 0; @@ -251,43 +231,33 @@ public final class RPCProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getEffectiveUserBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, effectiveUser_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getRealUserBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, realUser_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getEffectiveUserBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, effectiveUser_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getRealUserBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, realUser_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long 
serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -308,12 +278,10 @@ public final class RPCProtos { result = result && getRealUser() .equals(other.getRealUser()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -329,7 +297,7 @@ public final class RPCProtos { hash = (37 * hash) + REAL_USER_FIELD_NUMBER; hash = (53 * hash) + getRealUser().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -357,65 +325,77 @@ public final class RPCProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.UserInformation} - * *
      * User Information proto.  Included in ConnectionHeader on connection setup
      * 
+ * + * Protobuf type {@code hbase.pb.UserInformation} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.UserInformation) + org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_UserInformation_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_UserInformation_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -428,18 +408,15 @@ public final class RPCProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); effectiveUser_ = ""; @@ -449,10 +426,6 @@ public final class RPCProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_UserInformation_descriptor; @@ -487,6 +460,32 @@ public final class RPCProtos { return result; } + public 
Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation)other); @@ -508,13 +507,13 @@ public final class RPCProtos { realUser_ = other.realUser_; onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasEffectiveUser()) { - return false; } return true; @@ -529,7 +528,7 @@ public final class RPCProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -539,7 +538,6 @@ public final class RPCProtos { } private int bitField0_; - // required string effective_user = 1; private java.lang.Object 
effectiveUser_ = ""; /** * required string effective_user = 1; @@ -553,9 +551,12 @@ public final class RPCProtos { public java.lang.String getEffectiveUser() { java.lang.Object ref = effectiveUser_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - effectiveUser_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + effectiveUser_ = s; + } return s; } else { return (java.lang.String) ref; @@ -613,7 +614,6 @@ public final class RPCProtos { return this; } - // optional string real_user = 2; private java.lang.Object realUser_ = ""; /** * optional string real_user = 2; @@ -627,9 +627,12 @@ public final class RPCProtos { public java.lang.String getRealUser() { java.lang.Object ref = realUser_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - realUser_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + realUser_ = s; + } return s; } else { return (java.lang.String) ref; @@ -686,22 +689,59 @@ public final class RPCProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.UserInformation) } + // @@protoc_insertion_point(class_scope:hbase.pb.UserInformation) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation DEFAULT_INSTANCE; static { - defaultInstance = new UserInformation(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public UserInformation parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new UserInformation(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.UserInformation) } - public interface ConnectionHeaderOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ConnectionHeaderOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ConnectionHeader) + com.google.protobuf.MessageOrBuilder { - // optional .hbase.pb.UserInformation user_info = 1; /** * optional .hbase.pb.UserInformation user_info = 1; */ @@ -715,7 +755,6 @@ public final class RPCProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder(); - // optional string service_name = 2; /** * optional string service_name = 2; */ @@ -730,67 +769,64 @@ public final class RPCProtos { com.google.protobuf.ByteString getServiceNameBytes(); - // optional string cell_block_codec_class = 3; /** - * optional string cell_block_codec_class = 3; - * *
      * Cell block codec we will use sending over optional cell blocks.  Server throws exception
      * if cannot deal.  Null means no codec'ing going on so we are pb all the time (SLOW!!!)
      * 
+ * + * optional string cell_block_codec_class = 3; */ boolean hasCellBlockCodecClass(); /** - * optional string cell_block_codec_class = 3; - * *
      * Cell block codec we will use sending over optional cell blocks.  Server throws exception
      * if cannot deal.  Null means no codec'ing going on so we are pb all the time (SLOW!!!)
      * 
+ * + * optional string cell_block_codec_class = 3; */ java.lang.String getCellBlockCodecClass(); /** - * optional string cell_block_codec_class = 3; - * *
      * Cell block codec we will use sending over optional cell blocks.  Server throws exception
      * if cannot deal.  Null means no codec'ing going on so we are pb all the time (SLOW!!!)
      * 
+ * + * optional string cell_block_codec_class = 3; */ com.google.protobuf.ByteString getCellBlockCodecClassBytes(); - // optional string cell_block_compressor_class = 4; /** - * optional string cell_block_compressor_class = 4; - * *
      * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
      * Class must implement hadoop's CompressionCodec Interface.  Can't compress if no codec.
      * 
+ * + * optional string cell_block_compressor_class = 4; */ boolean hasCellBlockCompressorClass(); /** - * optional string cell_block_compressor_class = 4; - * *
      * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
      * Class must implement hadoop's CompressionCodec Interface.  Can't compress if no codec.
      * 
+ * + * optional string cell_block_compressor_class = 4; */ java.lang.String getCellBlockCompressorClass(); /** - * optional string cell_block_compressor_class = 4; - * *
      * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
      * Class must implement hadoop's CompressionCodec Interface.  Can't compress if no codec.
      * 
+ * + * optional string cell_block_compressor_class = 4; */ com.google.protobuf.ByteString getCellBlockCompressorClassBytes(); - // optional .hbase.pb.VersionInfo version_info = 5; /** * optional .hbase.pb.VersionInfo version_info = 5; */ @@ -805,42 +841,36 @@ public final class RPCProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder getVersionInfoOrBuilder(); } /** - * Protobuf type {@code hbase.pb.ConnectionHeader} - * *
    * This is sent on connection setup after the connection preamble is sent.
    * 
+ * + * Protobuf type {@code hbase.pb.ConnectionHeader} */ - public static final class ConnectionHeader extends - com.google.protobuf.GeneratedMessage - implements ConnectionHeaderOrBuilder { + public static final class ConnectionHeader extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ConnectionHeader) + ConnectionHeaderOrBuilder { // Use ConnectionHeader.newBuilder() to construct. - private ConnectionHeader(com.google.protobuf.GeneratedMessage.Builder builder) { + private ConnectionHeader(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private ConnectionHeader(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ConnectionHeader defaultInstance; - public static ConnectionHeader getDefaultInstance() { - return defaultInstance; - } - - public ConnectionHeader getDefaultInstanceForType() { - return defaultInstance; + private ConnectionHeader() { + serviceName_ = ""; + cellBlockCodecClass_ = ""; + cellBlockCompressorClass_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ConnectionHeader( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -873,18 +903,21 @@ public final class RPCProtos { break; } case 18: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; - serviceName_ = input.readBytes(); + serviceName_ = bs; break; } case 26: { + com.google.protobuf.ByteString bs = 
input.readBytes(); bitField0_ |= 0x00000004; - cellBlockCodecClass_ = input.readBytes(); + cellBlockCodecClass_ = bs; break; } case 34: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000008; - cellBlockCompressorClass_ = input.readBytes(); + cellBlockCompressorClass_ = bs; break; } case 42: { @@ -906,7 +939,7 @@ public final class RPCProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -917,30 +950,14 @@ public final class RPCProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeader_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeader_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ConnectionHeader parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ConnectionHeader(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional .hbase.pb.UserInformation user_info = 1; public static final int USER_INFO_FIELD_NUMBER = 1; private 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_; /** @@ -953,18 +970,17 @@ public final class RPCProtos { * optional .hbase.pb.UserInformation user_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() { - return userInfo_; + return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } /** * optional .hbase.pb.UserInformation user_info = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() { - return userInfo_; + return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } - // optional string service_name = 2; public static final int SERVICE_NAME_FIELD_NUMBER = 2; - private java.lang.Object serviceName_; + private volatile java.lang.Object serviceName_; /** * optional string service_name = 2; */ @@ -1005,27 +1021,26 @@ public final class RPCProtos { } } - // optional string cell_block_codec_class = 3; public static final int CELL_BLOCK_CODEC_CLASS_FIELD_NUMBER = 3; - private java.lang.Object cellBlockCodecClass_; + private volatile java.lang.Object cellBlockCodecClass_; /** - * optional string cell_block_codec_class = 3; - * *
      * Cell block codec we will use sending over optional cell blocks.  Server throws exception
      * if cannot deal.  Null means no codec'ing going on so we are pb all the time (SLOW!!!)
      * 
+ * + * optional string cell_block_codec_class = 3; */ public boolean hasCellBlockCodecClass() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * optional string cell_block_codec_class = 3; - * *
      * Cell block codec we will use sending over optional cell blocks.  Server throws exception
      * if cannot deal.  Null means no codec'ing going on so we are pb all the time (SLOW!!!)
      * 
+ * + * optional string cell_block_codec_class = 3; */ public java.lang.String getCellBlockCodecClass() { java.lang.Object ref = cellBlockCodecClass_; @@ -1042,12 +1057,12 @@ public final class RPCProtos { } } /** - * optional string cell_block_codec_class = 3; - * *
      * Cell block codec we will use sending over optional cell blocks.  Server throws exception
      * if cannot deal.  Null means no codec'ing going on so we are pb all the time (SLOW!!!)
      * 
+ * + * optional string cell_block_codec_class = 3; */ public com.google.protobuf.ByteString getCellBlockCodecClassBytes() { @@ -1063,27 +1078,26 @@ public final class RPCProtos { } } - // optional string cell_block_compressor_class = 4; public static final int CELL_BLOCK_COMPRESSOR_CLASS_FIELD_NUMBER = 4; - private java.lang.Object cellBlockCompressorClass_; + private volatile java.lang.Object cellBlockCompressorClass_; /** - * optional string cell_block_compressor_class = 4; - * *
      * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
      * Class must implement hadoop's CompressionCodec Interface.  Can't compress if no codec.
      * 
+ * + * optional string cell_block_compressor_class = 4; */ public boolean hasCellBlockCompressorClass() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** - * optional string cell_block_compressor_class = 4; - * *
      * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
      * Class must implement hadoop's CompressionCodec Interface.  Can't compress if no codec.
      * 
+ * + * optional string cell_block_compressor_class = 4; */ public java.lang.String getCellBlockCompressorClass() { java.lang.Object ref = cellBlockCompressorClass_; @@ -1100,12 +1114,12 @@ public final class RPCProtos { } } /** - * optional string cell_block_compressor_class = 4; - * *
      * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
      * Class must implement hadoop's CompressionCodec Interface.  Can't compress if no codec.
      * 
+ * + * optional string cell_block_compressor_class = 4; */ public com.google.protobuf.ByteString getCellBlockCompressorClassBytes() { @@ -1121,7 +1135,6 @@ public final class RPCProtos { } } - // optional .hbase.pb.VersionInfo version_info = 5; public static final int VERSION_INFO_FIELD_NUMBER = 5; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo versionInfo_; /** @@ -1134,26 +1147,20 @@ public final class RPCProtos { * optional .hbase.pb.VersionInfo version_info = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo getVersionInfo() { - return versionInfo_; + return versionInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance() : versionInfo_; } /** * optional .hbase.pb.VersionInfo version_info = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder getVersionInfoOrBuilder() { - return versionInfo_; + return versionInfo_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance() : versionInfo_; } - private void initFields() { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - serviceName_ = ""; - cellBlockCodecClass_ = ""; - cellBlockCompressorClass_ = ""; - versionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (hasUserInfo()) { if (!getUserInfo().isInitialized()) { @@ -1173,64 +1180,53 @@ public final class RPCProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, userInfo_); + output.writeMessage(1, getUserInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getServiceNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, serviceName_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, getCellBlockCodecClassBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, cellBlockCodecClass_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeBytes(4, getCellBlockCompressorClassBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, cellBlockCompressorClass_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeMessage(5, versionInfo_); + output.writeMessage(5, getVersionInfo()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if 
(size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, userInfo_); + .computeMessageSize(1, getUserInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getServiceNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, serviceName_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, getCellBlockCodecClassBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, cellBlockCodecClass_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(4, getCellBlockCompressorClassBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, cellBlockCompressorClass_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(5, versionInfo_); + .computeMessageSize(5, getVersionInfo()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -1266,12 +1262,10 @@ public final class RPCProtos { result = result && getVersionInfo() .equals(other.getVersionInfo()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -1299,7 
+1293,7 @@ public final class RPCProtos { hash = (37 * hash) + VERSION_INFO_FIELD_NUMBER; hash = (53 * hash) + getVersionInfo().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -1327,65 +1321,77 @@ public final class RPCProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.ConnectionHeader} - * *
      * This is sent on connection setup after the connection preamble is sent.
      * 
+ * + * Protobuf type {@code hbase.pb.ConnectionHeader} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ConnectionHeader) + org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeader_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeader_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -1398,24 +1404,21 @@ public final class RPCProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getUserInfoFieldBuilder(); getVersionInfoFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (userInfoBuilder_ == null) { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + userInfo_ = null; } else { userInfoBuilder_.clear(); } @@ -1427,7 +1430,7 @@ public final class RPCProtos { cellBlockCompressorClass_ = ""; bitField0_ = (bitField0_ & ~0x00000008); if (versionInfoBuilder_ == null) { - 
versionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance(); + versionInfo_ = null; } else { versionInfoBuilder_.clear(); } @@ -1435,10 +1438,6 @@ public final class RPCProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeader_descriptor; @@ -1493,6 +1492,32 @@ public final class RPCProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader)other); @@ -1525,20 +1550,19 @@ public final class RPCProtos { if (other.hasVersionInfo()) { mergeVersionInfo(other.getVersionInfo()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean 
isInitialized() { if (hasUserInfo()) { if (!getUserInfo().isInitialized()) { - return false; } } if (hasVersionInfo()) { if (!getVersionInfo().isInitialized()) { - return false; } } @@ -1554,7 +1578,7 @@ public final class RPCProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -1564,9 +1588,8 @@ public final class RPCProtos { } private int bitField0_; - // optional .hbase.pb.UserInformation user_info = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> userInfoBuilder_; /** * optional .hbase.pb.UserInformation user_info = 1; @@ -1579,7 +1602,7 @@ public final class RPCProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() { if (userInfoBuilder_ == null) { - return userInfo_; + return userInfo_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } else { return userInfoBuilder_.getMessage(); } @@ -1620,6 +1643,7 @@ public final class RPCProtos { public Builder mergeUserInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation value) { if (userInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + userInfo_ != null && userInfo_ != org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance()) { userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.newBuilder(userInfo_).mergeFrom(value).buildPartial(); @@ -1638,7 +1662,7 @@ public final class RPCProtos { */ public Builder clearUserInfo() { if (userInfoBuilder_ == null) { - userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); + userInfo_ = null; onChanged(); } else { userInfoBuilder_.clear(); @@ -1661,19 +1685,20 @@ public final class RPCProtos { if (userInfoBuilder_ != null) { return userInfoBuilder_.getMessageOrBuilder(); } else { - return userInfo_; + return userInfo_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } } /** * optional .hbase.pb.UserInformation user_info = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> getUserInfoFieldBuilder() { if (userInfoBuilder_ == null) { - userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder>( - userInfo_, + getUserInfo(), getParentForChildren(), isClean()); userInfo_ = null; @@ -1681,7 +1706,6 @@ public final class RPCProtos { return userInfoBuilder_; } - // optional string service_name = 2; private java.lang.Object serviceName_ = ""; /** * optional string service_name = 2; @@ -1695,9 +1719,12 @@ public final class RPCProtos { public java.lang.String getServiceName() { java.lang.Object ref = serviceName_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - serviceName_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + serviceName_ = s; + } return s; } else { return (java.lang.String) ref; @@ -1755,45 +1782,47 @@ public final class RPCProtos { return this; } - // optional string cell_block_codec_class = 3; private java.lang.Object cellBlockCodecClass_ = ""; /** - * optional string cell_block_codec_class = 3; - * *
        * Cell block codec we will use sending over optional cell blocks.  Server throws exception
        * if cannot deal.  Null means no codec'ing going on so we are pb all the time (SLOW!!!)
        * 
+ * + * optional string cell_block_codec_class = 3; */ public boolean hasCellBlockCodecClass() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * optional string cell_block_codec_class = 3; - * *
        * Cell block codec we will use sending over optional cell blocks.  Server throws exception
        * if cannot deal.  Null means no codec'ing going on so we are pb all the time (SLOW!!!)
        * 
+ * + * optional string cell_block_codec_class = 3; */ public java.lang.String getCellBlockCodecClass() { java.lang.Object ref = cellBlockCodecClass_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - cellBlockCodecClass_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + cellBlockCodecClass_ = s; + } return s; } else { return (java.lang.String) ref; } } /** - * optional string cell_block_codec_class = 3; - * *
        * Cell block codec we will use sending over optional cell blocks.  Server throws exception
        * if cannot deal.  Null means no codec'ing going on so we are pb all the time (SLOW!!!)
        * 
+ * + * optional string cell_block_codec_class = 3; */ public com.google.protobuf.ByteString getCellBlockCodecClassBytes() { @@ -1809,12 +1838,12 @@ public final class RPCProtos { } } /** - * optional string cell_block_codec_class = 3; - * *
        * Cell block codec we will use sending over optional cell blocks.  Server throws exception
        * if cannot deal.  Null means no codec'ing going on so we are pb all the time (SLOW!!!)
        * 
+ * + * optional string cell_block_codec_class = 3; */ public Builder setCellBlockCodecClass( java.lang.String value) { @@ -1827,12 +1856,12 @@ public final class RPCProtos { return this; } /** - * optional string cell_block_codec_class = 3; - * *
        * Cell block codec we will use sending over optional cell blocks.  Server throws exception
        * if cannot deal.  Null means no codec'ing going on so we are pb all the time (SLOW!!!)
        * 
+ * + * optional string cell_block_codec_class = 3; */ public Builder clearCellBlockCodecClass() { bitField0_ = (bitField0_ & ~0x00000004); @@ -1841,12 +1870,12 @@ public final class RPCProtos { return this; } /** - * optional string cell_block_codec_class = 3; - * *
        * Cell block codec we will use sending over optional cell blocks.  Server throws exception
        * if cannot deal.  Null means no codec'ing going on so we are pb all the time (SLOW!!!)
        * 
+ * + * optional string cell_block_codec_class = 3; */ public Builder setCellBlockCodecClassBytes( com.google.protobuf.ByteString value) { @@ -1859,45 +1888,47 @@ public final class RPCProtos { return this; } - // optional string cell_block_compressor_class = 4; private java.lang.Object cellBlockCompressorClass_ = ""; /** - * optional string cell_block_compressor_class = 4; - * *
        * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
        * Class must implement hadoop's CompressionCodec Interface.  Can't compress if no codec.
        * 
+ * + * optional string cell_block_compressor_class = 4; */ public boolean hasCellBlockCompressorClass() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** - * optional string cell_block_compressor_class = 4; - * *
        * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
        * Class must implement hadoop's CompressionCodec Interface.  Can't compress if no codec.
        * 
+ * + * optional string cell_block_compressor_class = 4; */ public java.lang.String getCellBlockCompressorClass() { java.lang.Object ref = cellBlockCompressorClass_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - cellBlockCompressorClass_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + cellBlockCompressorClass_ = s; + } return s; } else { return (java.lang.String) ref; } } /** - * optional string cell_block_compressor_class = 4; - * *
        * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
        * Class must implement hadoop's CompressionCodec Interface.  Can't compress if no codec.
        * 
+ * + * optional string cell_block_compressor_class = 4; */ public com.google.protobuf.ByteString getCellBlockCompressorClassBytes() { @@ -1913,12 +1944,12 @@ public final class RPCProtos { } } /** - * optional string cell_block_compressor_class = 4; - * *
        * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
        * Class must implement hadoop's CompressionCodec Interface.  Can't compress if no codec.
        * 
+ * + * optional string cell_block_compressor_class = 4; */ public Builder setCellBlockCompressorClass( java.lang.String value) { @@ -1931,12 +1962,12 @@ public final class RPCProtos { return this; } /** - * optional string cell_block_compressor_class = 4; - * *
        * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
        * Class must implement hadoop's CompressionCodec Interface.  Can't compress if no codec.
        * 
+ * + * optional string cell_block_compressor_class = 4; */ public Builder clearCellBlockCompressorClass() { bitField0_ = (bitField0_ & ~0x00000008); @@ -1945,12 +1976,12 @@ public final class RPCProtos { return this; } /** - * optional string cell_block_compressor_class = 4; - * *
        * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
        * Class must implement hadoop's CompressionCodec Interface.  Can't compress if no codec.
        * 
+ * + * optional string cell_block_compressor_class = 4; */ public Builder setCellBlockCompressorClassBytes( com.google.protobuf.ByteString value) { @@ -1963,9 +1994,8 @@ public final class RPCProtos { return this; } - // optional .hbase.pb.VersionInfo version_info = 5; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo versionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo versionInfo_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder> versionInfoBuilder_; /** * optional .hbase.pb.VersionInfo version_info = 5; @@ -1978,7 +2008,7 @@ public final class RPCProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo getVersionInfo() { if (versionInfoBuilder_ == null) { - return versionInfo_; + return versionInfo_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance() : versionInfo_; } else { return versionInfoBuilder_.getMessage(); } @@ -2019,6 +2049,7 @@ public final class RPCProtos { public Builder mergeVersionInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo value) { if (versionInfoBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010) && + versionInfo_ != null && versionInfo_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance()) { versionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.newBuilder(versionInfo_).mergeFrom(value).buildPartial(); @@ -2037,7 +2068,7 @@ public final class RPCProtos { */ public Builder clearVersionInfo() { if (versionInfoBuilder_ == null) { - versionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance(); + versionInfo_ = null; onChanged(); } else { versionInfoBuilder_.clear(); @@ -2060,95 +2091,125 @@ public final class RPCProtos { if (versionInfoBuilder_ != null) { return versionInfoBuilder_.getMessageOrBuilder(); } else { - return versionInfo_; + return versionInfo_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance() : versionInfo_; } } /** * optional .hbase.pb.VersionInfo version_info = 5; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder> getVersionInfoFieldBuilder() { if (versionInfoBuilder_ == null) { - versionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + versionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder>( - versionInfo_, + getVersionInfo(), getParentForChildren(), isClean()); versionInfo_ = null; } return versionInfoBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ConnectionHeader) } + // @@protoc_insertion_point(class_scope:hbase.pb.ConnectionHeader) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader DEFAULT_INSTANCE; static { - defaultInstance = new ConnectionHeader(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader(); } - // @@protoc_insertion_point(class_scope:hbase.pb.ConnectionHeader) - } + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader getDefaultInstance() { + return DEFAULT_INSTANCE; + } - public interface CellBlockMetaOrBuilder - extends com.google.protobuf.MessageOrBuilder { + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ConnectionHeader parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ConnectionHeader(input, extensionRegistry); + } + }; - // optional uint32 length = 1; - /** - * optional uint32 length = 1; - * - *
-     * Length of the following cell block.  Could calculate it but convenient having it too hand.
+    public static com.google.protobuf.Parser parser() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public com.google.protobuf.Parser getParserForType() {
+      return PARSER;
+    }
+
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader getDefaultInstanceForType() {
+      return DEFAULT_INSTANCE;
+    }
+
+  }
+
+  public interface CellBlockMetaOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:hbase.pb.CellBlockMeta)
+      com.google.protobuf.MessageOrBuilder {
+
+    /**
+     * 
+     * Length of the following cell block.  Could calculate it but convenient having it too hand.
      * 
+ * + * optional uint32 length = 1; */ boolean hasLength(); /** - * optional uint32 length = 1; - * *
      * Length of the following cell block.  Could calculate it but convenient having it too hand.
      * 
+ * + * optional uint32 length = 1; */ int getLength(); } /** - * Protobuf type {@code hbase.pb.CellBlockMeta} - * *
    * Optional Cell block Message.  Included in client RequestHeader
    * 
+ * + * Protobuf type {@code hbase.pb.CellBlockMeta} */ - public static final class CellBlockMeta extends - com.google.protobuf.GeneratedMessage - implements CellBlockMetaOrBuilder { + public static final class CellBlockMeta extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.CellBlockMeta) + CellBlockMetaOrBuilder { // Use CellBlockMeta.newBuilder() to construct. - private CellBlockMeta(com.google.protobuf.GeneratedMessage.Builder builder) { + private CellBlockMeta(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private CellBlockMeta(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final CellBlockMeta defaultInstance; - public static CellBlockMeta getDefaultInstance() { - return defaultInstance; } - - public CellBlockMeta getDefaultInstanceForType() { - return defaultInstance; + private CellBlockMeta() { + length_ = 0; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private CellBlockMeta( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -2178,7 +2239,7 @@ public final class RPCProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -2189,60 +2250,42 @@ public final 
class RPCProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CellBlockMeta parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CellBlockMeta(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional uint32 length = 1; public static final int LENGTH_FIELD_NUMBER = 1; private int length_; /** - * optional uint32 length = 1; - * *
      * Length of the following cell block.  Could calculate it but convenient having it too hand.
      * 
+ * + * optional uint32 length = 1; */ public boolean hasLength() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional uint32 length = 1; - * *
      * Length of the following cell block.  Could calculate it but convenient having it too hand.
      * 
+ * + * optional uint32 length = 1; */ public int getLength() { return length_; } - private void initFields() { - length_ = 0; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -2250,16 +2293,14 @@ public final class RPCProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt32(1, length_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -2267,19 +2308,13 @@ public final class RPCProtos { size += com.google.protobuf.CodedOutputStream .computeUInt32Size(1, length_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -2295,12 +2330,10 @@ public final class RPCProtos { result = result && (getLength() == other.getLength()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -2312,7 +2345,7 @@ public final class RPCProtos { hash = (37 * hash) + 
LENGTH_FIELD_NUMBER; hash = (53 * hash) + getLength(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -2340,65 +2373,77 @@ public final class RPCProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.CellBlockMeta} - * *
      * Optional Cell block Message.  Included in client RequestHeader
      * 
+ * + * Protobuf type {@code hbase.pb.CellBlockMeta} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.CellBlockMeta) + org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -2411,18 +2456,15 @@ public final class RPCProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); length_ = 0; @@ -2430,10 +2472,6 @@ public final class RPCProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_descriptor; @@ -2464,6 +2502,32 @@ public final class RPCProtos { return result; } + public Builder clone() 
{ + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta)other); @@ -2478,7 +2542,8 @@ public final class RPCProtos { if (other.hasLength()) { setLength(other.getLength()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -2495,7 +2560,7 @@ public final class RPCProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -2505,34 +2570,33 @@ public final class RPCProtos { } private int bitField0_; - // optional uint32 length = 1; private int length_ ; /** - * optional uint32 length = 1; - * *
        * Length of the following cell block.  Could calculate it but convenient having it too hand.
        * 
+ * + * optional uint32 length = 1; */ public boolean hasLength() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional uint32 length = 1; - * *
        * Length of the following cell block.  Could calculate it but convenient having it too hand.
        * 
+ * + * optional uint32 length = 1; */ public int getLength() { return length_; } /** - * optional uint32 length = 1; - * *
        * Length of the following cell block.  Could calculate it but convenient having it too hand.
        * 
+ * + * optional uint32 length = 1; */ public Builder setLength(int value) { bitField0_ |= 0x00000001; @@ -2541,11 +2605,11 @@ public final class RPCProtos { return this; } /** - * optional uint32 length = 1; - * *
        * Length of the following cell block.  Could calculate it but convenient having it too hand.
        * 
+ * + * optional uint32 length = 1; */ public Builder clearLength() { bitField0_ = (bitField0_ & ~0x00000001); @@ -2553,106 +2617,140 @@ public final class RPCProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.CellBlockMeta) } + // @@protoc_insertion_point(class_scope:hbase.pb.CellBlockMeta) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta DEFAULT_INSTANCE; static { - defaultInstance = new CellBlockMeta(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public CellBlockMeta parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CellBlockMeta(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.CellBlockMeta) } - public interface ExceptionResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public 
interface ExceptionResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ExceptionResponse) + com.google.protobuf.MessageOrBuilder { - // optional string exception_class_name = 1; /** - * optional string exception_class_name = 1; - * *
      * Class name of the exception thrown from the server
      * 
+ * + * optional string exception_class_name = 1; */ boolean hasExceptionClassName(); /** - * optional string exception_class_name = 1; - * *
      * Class name of the exception thrown from the server
      * 
+ * + * optional string exception_class_name = 1; */ java.lang.String getExceptionClassName(); /** - * optional string exception_class_name = 1; - * *
      * Class name of the exception thrown from the server
      * 
+ * + * optional string exception_class_name = 1; */ com.google.protobuf.ByteString getExceptionClassNameBytes(); - // optional string stack_trace = 2; /** - * optional string stack_trace = 2; - * *
      * Exception stack trace from the server side
      * 
+ * + * optional string stack_trace = 2; */ boolean hasStackTrace(); /** - * optional string stack_trace = 2; - * *
      * Exception stack trace from the server side
      * 
+ * + * optional string stack_trace = 2; */ java.lang.String getStackTrace(); /** - * optional string stack_trace = 2; - * *
      * Exception stack trace from the server side
      * 
+ * + * optional string stack_trace = 2; */ com.google.protobuf.ByteString getStackTraceBytes(); - // optional string hostname = 3; /** - * optional string hostname = 3; - * *
      * Optional hostname.  Filled in for some exceptions such as region moved
      * where exception gives clue on where the region may have moved.
      * 
+ * + * optional string hostname = 3; */ boolean hasHostname(); /** - * optional string hostname = 3; - * *
      * Optional hostname.  Filled in for some exceptions such as region moved
      * where exception gives clue on where the region may have moved.
      * 
+ * + * optional string hostname = 3; */ java.lang.String getHostname(); /** - * optional string hostname = 3; - * *
      * Optional hostname.  Filled in for some exceptions such as region moved
      * where exception gives clue on where the region may have moved.
      * 
+ * + * optional string hostname = 3; */ com.google.protobuf.ByteString getHostnameBytes(); - // optional int32 port = 4; /** * optional int32 port = 4; */ @@ -2662,62 +2760,57 @@ public final class RPCProtos { */ int getPort(); - // optional bool do_not_retry = 5; /** - * optional bool do_not_retry = 5; - * *
      * Set if we are NOT to retry on receipt of this exception
      * 
+ * + * optional bool do_not_retry = 5; */ boolean hasDoNotRetry(); /** - * optional bool do_not_retry = 5; - * *
      * Set if we are NOT to retry on receipt of this exception
      * 
+ * + * optional bool do_not_retry = 5; */ boolean getDoNotRetry(); } /** - * Protobuf type {@code hbase.pb.ExceptionResponse} - * *
    * At the RPC layer, this message is used to carry
    * the server side exception to the RPC client.
    * 
+ * + * Protobuf type {@code hbase.pb.ExceptionResponse} */ - public static final class ExceptionResponse extends - com.google.protobuf.GeneratedMessage - implements ExceptionResponseOrBuilder { + public static final class ExceptionResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ExceptionResponse) + ExceptionResponseOrBuilder { // Use ExceptionResponse.newBuilder() to construct. - private ExceptionResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private ExceptionResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ExceptionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ExceptionResponse defaultInstance; - public static ExceptionResponse getDefaultInstance() { - return defaultInstance; } - - public ExceptionResponse getDefaultInstanceForType() { - return defaultInstance; + private ExceptionResponse() { + exceptionClassName_ = ""; + stackTrace_ = ""; + hostname_ = ""; + port_ = 0; + doNotRetry_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ExceptionResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -2737,18 +2830,21 @@ public final class RPCProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - exceptionClassName_ = input.readBytes(); + exceptionClassName_ = bs; break; } 
case 18: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; - stackTrace_ = input.readBytes(); + stackTrace_ = bs; break; } case 26: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000004; - hostname_ = input.readBytes(); + hostname_ = bs; break; } case 32: { @@ -2767,7 +2863,7 @@ public final class RPCProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -2778,48 +2874,32 @@ public final class RPCProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ExceptionResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ExceptionResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional string exception_class_name = 1; public static final int EXCEPTION_CLASS_NAME_FIELD_NUMBER = 1; - private 
java.lang.Object exceptionClassName_; + private volatile java.lang.Object exceptionClassName_; /** - * optional string exception_class_name = 1; - * *
      * Class name of the exception thrown from the server
      * 
+ * + * optional string exception_class_name = 1; */ public boolean hasExceptionClassName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional string exception_class_name = 1; - * *
      * Class name of the exception thrown from the server
      * 
+ * + * optional string exception_class_name = 1; */ public java.lang.String getExceptionClassName() { java.lang.Object ref = exceptionClassName_; @@ -2836,11 +2916,11 @@ public final class RPCProtos { } } /** - * optional string exception_class_name = 1; - * *
      * Class name of the exception thrown from the server
      * 
+ * + * optional string exception_class_name = 1; */ public com.google.protobuf.ByteString getExceptionClassNameBytes() { @@ -2856,25 +2936,24 @@ public final class RPCProtos { } } - // optional string stack_trace = 2; public static final int STACK_TRACE_FIELD_NUMBER = 2; - private java.lang.Object stackTrace_; + private volatile java.lang.Object stackTrace_; /** - * optional string stack_trace = 2; - * *
      * Exception stack trace from the server side
      * 
+ * + * optional string stack_trace = 2; */ public boolean hasStackTrace() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional string stack_trace = 2; - * *
      * Exception stack trace from the server side
      * 
+ * + * optional string stack_trace = 2; */ public java.lang.String getStackTrace() { java.lang.Object ref = stackTrace_; @@ -2891,11 +2970,11 @@ public final class RPCProtos { } } /** - * optional string stack_trace = 2; - * *
      * Exception stack trace from the server side
      * 
+ * + * optional string stack_trace = 2; */ public com.google.protobuf.ByteString getStackTraceBytes() { @@ -2911,27 +2990,26 @@ public final class RPCProtos { } } - // optional string hostname = 3; public static final int HOSTNAME_FIELD_NUMBER = 3; - private java.lang.Object hostname_; + private volatile java.lang.Object hostname_; /** - * optional string hostname = 3; - * *
      * Optional hostname.  Filled in for some exceptions such as region moved
      * where exception gives clue on where the region may have moved.
      * 
+ * + * optional string hostname = 3; */ public boolean hasHostname() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * optional string hostname = 3; - * *
      * Optional hostname.  Filled in for some exceptions such as region moved
      * where exception gives clue on where the region may have moved.
      * 
+ * + * optional string hostname = 3; */ public java.lang.String getHostname() { java.lang.Object ref = hostname_; @@ -2948,12 +3026,12 @@ public final class RPCProtos { } } /** - * optional string hostname = 3; - * *
      * Optional hostname.  Filled in for some exceptions such as region moved
      * where exception gives clue on where the region may have moved.
      * 
+ * + * optional string hostname = 3; */ public com.google.protobuf.ByteString getHostnameBytes() { @@ -2969,7 +3047,6 @@ public final class RPCProtos { } } - // optional int32 port = 4; public static final int PORT_FIELD_NUMBER = 4; private int port_; /** @@ -2985,41 +3062,34 @@ public final class RPCProtos { return port_; } - // optional bool do_not_retry = 5; public static final int DO_NOT_RETRY_FIELD_NUMBER = 5; private boolean doNotRetry_; /** - * optional bool do_not_retry = 5; - * *
      * Set if we are NOT to retry on receipt of this exception
      * 
+ * + * optional bool do_not_retry = 5; */ public boolean hasDoNotRetry() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** - * optional bool do_not_retry = 5; - * *
      * Set if we are NOT to retry on receipt of this exception
      * 
+ * + * optional bool do_not_retry = 5; */ public boolean getDoNotRetry() { return doNotRetry_; } - private void initFields() { - exceptionClassName_ = ""; - stackTrace_ = ""; - hostname_ = ""; - port_ = 0; - doNotRetry_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -3027,15 +3097,14 @@ public final class RPCProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getExceptionClassNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, exceptionClassName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getStackTraceBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, stackTrace_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, getHostnameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, hostname_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeInt32(4, port_); @@ -3043,26 +3112,22 @@ public final class RPCProtos { if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBool(5, doNotRetry_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getExceptionClassNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, exceptionClassName_); } if (((bitField0_ & 0x00000002) == 
0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getStackTraceBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, stackTrace_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, getHostnameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, hostname_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream @@ -3072,19 +3137,13 @@ public final class RPCProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(5, doNotRetry_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -3120,12 +3179,10 @@ public final class RPCProtos { result = result && (getDoNotRetry() == other.getDoNotRetry()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -3151,9 +3208,10 @@ public final class RPCProtos { } if (hasDoNotRetry()) { hash = (37 * hash) + DO_NOT_RETRY_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getDoNotRetry()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getDoNotRetry()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -3181,66 +3239,78 @@ public final class RPCProtos { } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.ExceptionResponse} - * *
      * At the RPC layer, this message is used to carry
      * the server side exception to the RPC client.
      * 
+ * + * Protobuf type {@code hbase.pb.ExceptionResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ExceptionResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -3253,18 +3323,15 @@ public final class RPCProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); exceptionClassName_ = ""; @@ -3280,10 +3347,6 @@ public final class RPCProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_descriptor; @@ -3330,6 +3393,32 @@ public final class RPCProtos { 
return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse)other); @@ -3362,7 +3451,8 @@ public final class RPCProtos { if (other.hasDoNotRetry()) { setDoNotRetry(other.getDoNotRetry()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -3379,7 +3469,7 @@ public final class RPCProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -3389,42 +3479,44 @@ public final class RPCProtos { } private int bitField0_; - // optional string exception_class_name = 1; private java.lang.Object exceptionClassName_ = ""; /** - * 
optional string exception_class_name = 1; - * *
        * Class name of the exception thrown from the server
        * 
+ * + * optional string exception_class_name = 1; */ public boolean hasExceptionClassName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional string exception_class_name = 1; - * *
        * Class name of the exception thrown from the server
        * 
+ * + * optional string exception_class_name = 1; */ public java.lang.String getExceptionClassName() { java.lang.Object ref = exceptionClassName_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - exceptionClassName_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + exceptionClassName_ = s; + } return s; } else { return (java.lang.String) ref; } } /** - * optional string exception_class_name = 1; - * *
        * Class name of the exception thrown from the server
        * 
+ * + * optional string exception_class_name = 1; */ public com.google.protobuf.ByteString getExceptionClassNameBytes() { @@ -3440,11 +3532,11 @@ public final class RPCProtos { } } /** - * optional string exception_class_name = 1; - * *
        * Class name of the exception thrown from the server
        * 
+ * + * optional string exception_class_name = 1; */ public Builder setExceptionClassName( java.lang.String value) { @@ -3457,11 +3549,11 @@ public final class RPCProtos { return this; } /** - * optional string exception_class_name = 1; - * *
        * Class name of the exception thrown from the server
        * 
+ * + * optional string exception_class_name = 1; */ public Builder clearExceptionClassName() { bitField0_ = (bitField0_ & ~0x00000001); @@ -3470,11 +3562,11 @@ public final class RPCProtos { return this; } /** - * optional string exception_class_name = 1; - * *
        * Class name of the exception thrown from the server
        * 
+ * + * optional string exception_class_name = 1; */ public Builder setExceptionClassNameBytes( com.google.protobuf.ByteString value) { @@ -3487,42 +3579,44 @@ public final class RPCProtos { return this; } - // optional string stack_trace = 2; private java.lang.Object stackTrace_ = ""; /** - * optional string stack_trace = 2; - * *
        * Exception stack trace from the server side
        * 
+ * + * optional string stack_trace = 2; */ public boolean hasStackTrace() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional string stack_trace = 2; - * *
        * Exception stack trace from the server side
        * 
+ * + * optional string stack_trace = 2; */ public java.lang.String getStackTrace() { java.lang.Object ref = stackTrace_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - stackTrace_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + stackTrace_ = s; + } return s; } else { return (java.lang.String) ref; } } /** - * optional string stack_trace = 2; - * *
        * Exception stack trace from the server side
        * 
+ * + * optional string stack_trace = 2; */ public com.google.protobuf.ByteString getStackTraceBytes() { @@ -3538,11 +3632,11 @@ public final class RPCProtos { } } /** - * optional string stack_trace = 2; - * *
        * Exception stack trace from the server side
        * 
+ * + * optional string stack_trace = 2; */ public Builder setStackTrace( java.lang.String value) { @@ -3555,11 +3649,11 @@ public final class RPCProtos { return this; } /** - * optional string stack_trace = 2; - * *
        * Exception stack trace from the server side
        * 
+ * + * optional string stack_trace = 2; */ public Builder clearStackTrace() { bitField0_ = (bitField0_ & ~0x00000002); @@ -3568,11 +3662,11 @@ public final class RPCProtos { return this; } /** - * optional string stack_trace = 2; - * *
        * Exception stack trace from the server side
        * 
+ * + * optional string stack_trace = 2; */ public Builder setStackTraceBytes( com.google.protobuf.ByteString value) { @@ -3585,45 +3679,47 @@ public final class RPCProtos { return this; } - // optional string hostname = 3; private java.lang.Object hostname_ = ""; /** - * optional string hostname = 3; - * *
        * Optional hostname.  Filled in for some exceptions such as region moved
        * where exception gives clue on where the region may have moved.
        * 
+ * + * optional string hostname = 3; */ public boolean hasHostname() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * optional string hostname = 3; - * *
        * Optional hostname.  Filled in for some exceptions such as region moved
        * where exception gives clue on where the region may have moved.
        * 
+ * + * optional string hostname = 3; */ public java.lang.String getHostname() { java.lang.Object ref = hostname_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - hostname_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + hostname_ = s; + } return s; } else { return (java.lang.String) ref; } } /** - * optional string hostname = 3; - * *
        * Optional hostname.  Filled in for some exceptions such as region moved
        * where exception gives clue on where the region may have moved.
        * 
+ * + * optional string hostname = 3; */ public com.google.protobuf.ByteString getHostnameBytes() { @@ -3639,12 +3735,12 @@ public final class RPCProtos { } } /** - * optional string hostname = 3; - * *
        * Optional hostname.  Filled in for some exceptions such as region moved
        * where exception gives clue on where the region may have moved.
        * 
+ * + * optional string hostname = 3; */ public Builder setHostname( java.lang.String value) { @@ -3657,12 +3753,12 @@ public final class RPCProtos { return this; } /** - * optional string hostname = 3; - * *
        * Optional hostname.  Filled in for some exceptions such as region moved
        * where exception gives clue on where the region may have moved.
        * 
+ * + * optional string hostname = 3; */ public Builder clearHostname() { bitField0_ = (bitField0_ & ~0x00000004); @@ -3671,12 +3767,12 @@ public final class RPCProtos { return this; } /** - * optional string hostname = 3; - * *
        * Optional hostname.  Filled in for some exceptions such as region moved
        * where exception gives clue on where the region may have moved.
        * 
+ * + * optional string hostname = 3; */ public Builder setHostnameBytes( com.google.protobuf.ByteString value) { @@ -3689,7 +3785,6 @@ public final class RPCProtos { return this; } - // optional int32 port = 4; private int port_ ; /** * optional int32 port = 4; @@ -3722,34 +3817,33 @@ public final class RPCProtos { return this; } - // optional bool do_not_retry = 5; private boolean doNotRetry_ ; /** - * optional bool do_not_retry = 5; - * *
        * Set if we are NOT to retry on receipt of this exception
        * 
+ * + * optional bool do_not_retry = 5; */ public boolean hasDoNotRetry() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** - * optional bool do_not_retry = 5; - * *
        * Set if we are NOT to retry on receipt of this exception
        * 
+ * + * optional bool do_not_retry = 5; */ public boolean getDoNotRetry() { return doNotRetry_; } /** - * optional bool do_not_retry = 5; - * *
        * Set if we are NOT to retry on receipt of this exception
        * 
+ * + * optional bool do_not_retry = 5; */ public Builder setDoNotRetry(boolean value) { bitField0_ |= 0x00000010; @@ -3758,11 +3852,11 @@ public final class RPCProtos { return this; } /** - * optional bool do_not_retry = 5; - * *
        * Set if we are NOT to retry on receipt of this exception
        * 
+ * + * optional bool do_not_retry = 5; */ public Builder clearDoNotRetry() { bitField0_ = (bitField0_ & ~0x00000010); @@ -3770,40 +3864,76 @@ public final class RPCProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ExceptionResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.ExceptionResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse DEFAULT_INSTANCE; static { - defaultInstance = new ExceptionResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ExceptionResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ExceptionResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ExceptionResponse) } - public interface RequestHeaderOrBuilder - extends 
com.google.protobuf.MessageOrBuilder { + public interface RequestHeaderOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RequestHeader) + com.google.protobuf.MessageOrBuilder { - // optional uint32 call_id = 1; /** - * optional uint32 call_id = 1; - * *
      * Monotonically increasing call_id to keep track of RPC requests and their response
      * 
+ * + * optional uint32 call_id = 1; */ boolean hasCallId(); /** - * optional uint32 call_id = 1; - * *
      * Monotonically increasing call_id to keep track of RPC requests and their response
      * 
+ * + * optional uint32 call_id = 1; */ int getCallId(); - // optional .hbase.pb.RPCTInfo trace_info = 2; /** * optional .hbase.pb.RPCTInfo trace_info = 2; */ @@ -3817,7 +3947,6 @@ public final class RPCProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfoOrBuilder getTraceInfoOrBuilder(); - // optional string method_name = 3; /** * optional string method_name = 3; */ @@ -3832,71 +3961,67 @@ public final class RPCProtos { com.google.protobuf.ByteString getMethodNameBytes(); - // optional bool request_param = 4; /** - * optional bool request_param = 4; - * *
      * If true, then a pb Message param follows.
      * 
+ * + * optional bool request_param = 4; */ boolean hasRequestParam(); /** - * optional bool request_param = 4; - * *
      * If true, then a pb Message param follows.
      * 
+ * + * optional bool request_param = 4; */ boolean getRequestParam(); - // optional .hbase.pb.CellBlockMeta cell_block_meta = 5; /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; - * *
      * If present, then an encoded data block follows.
      * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; */ boolean hasCellBlockMeta(); /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; - * *
      * If present, then an encoded data block follows.
      * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; */ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta getCellBlockMeta(); /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; - * *
      * If present, then an encoded data block follows.
      * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; */ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder getCellBlockMetaOrBuilder(); - // optional uint32 priority = 6; /** - * optional uint32 priority = 6; - * *
      * 0 is NORMAL priority.  200 is HIGH.  If no priority, treat it as NORMAL.
      * See HConstants.
      * 
+ * + * optional uint32 priority = 6; */ boolean hasPriority(); /** - * optional uint32 priority = 6; - * *
      * 0 is NORMAL priority.  200 is HIGH.  If no priority, treat it as NORMAL.
      * See HConstants.
      * 
+ * + * optional uint32 priority = 6; */ int getPriority(); - // optional uint32 timeout = 7; /** * optional uint32 timeout = 7; */ @@ -3907,42 +4032,38 @@ public final class RPCProtos { int getTimeout(); } /** - * Protobuf type {@code hbase.pb.RequestHeader} - * *
    * Header sent making a request.
    * 
+ * + * Protobuf type {@code hbase.pb.RequestHeader} */ - public static final class RequestHeader extends - com.google.protobuf.GeneratedMessage - implements RequestHeaderOrBuilder { + public static final class RequestHeader extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RequestHeader) + RequestHeaderOrBuilder { // Use RequestHeader.newBuilder() to construct. - private RequestHeader(com.google.protobuf.GeneratedMessage.Builder builder) { + private RequestHeader(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private RequestHeader(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RequestHeader defaultInstance; - public static RequestHeader getDefaultInstance() { - return defaultInstance; } - - public RequestHeader getDefaultInstanceForType() { - return defaultInstance; + private RequestHeader() { + callId_ = 0; + methodName_ = ""; + requestParam_ = false; + priority_ = 0; + timeout_ = 0; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RequestHeader( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -3980,8 +4101,9 @@ public final class RPCProtos { break; } case 26: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000004; - methodName_ = input.readBytes(); + methodName_ = bs; break; } case 32: { @@ -4018,7 +4140,7 @@ public final class RPCProtos { throw 
e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -4029,54 +4151,37 @@ public final class RPCProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_RequestHeader_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_RequestHeader_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RequestHeader parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RequestHeader(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional uint32 call_id = 1; public static final int CALL_ID_FIELD_NUMBER = 1; private int callId_; /** - * optional uint32 call_id = 1; - * *
      * Monotonically increasing call_id to keep track of RPC requests and their response
      * 
+ * + * optional uint32 call_id = 1; */ public boolean hasCallId() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional uint32 call_id = 1; - * *
      * Monotonically increasing call_id to keep track of RPC requests and their response
      * 
+ * + * optional uint32 call_id = 1; */ public int getCallId() { return callId_; } - // optional .hbase.pb.RPCTInfo trace_info = 2; public static final int TRACE_INFO_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo traceInfo_; /** @@ -4089,18 +4194,17 @@ public final class RPCProtos { * optional .hbase.pb.RPCTInfo trace_info = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo getTraceInfo() { - return traceInfo_; + return traceInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo.getDefaultInstance() : traceInfo_; } /** * optional .hbase.pb.RPCTInfo trace_info = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfoOrBuilder getTraceInfoOrBuilder() { - return traceInfo_; + return traceInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo.getDefaultInstance() : traceInfo_; } - // optional string method_name = 3; public static final int METHOD_NAME_FIELD_NUMBER = 3; - private java.lang.Object methodName_; + private volatile java.lang.Object methodName_; /** * optional string method_name = 3; */ @@ -4141,91 +4245,87 @@ public final class RPCProtos { } } - // optional bool request_param = 4; public static final int REQUEST_PARAM_FIELD_NUMBER = 4; private boolean requestParam_; /** - * optional bool request_param = 4; - * *
      * If true, then a pb Message param follows.
      * 
+ * + * optional bool request_param = 4; */ public boolean hasRequestParam() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** - * optional bool request_param = 4; - * *
      * If true, then a pb Message param follows.
      * 
+ * + * optional bool request_param = 4; */ public boolean getRequestParam() { return requestParam_; } - // optional .hbase.pb.CellBlockMeta cell_block_meta = 5; public static final int CELL_BLOCK_META_FIELD_NUMBER = 5; private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta cellBlockMeta_; /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; - * *
      * If present, then an encoded data block follows.
      * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; */ public boolean hasCellBlockMeta() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; - * *
      * If present, then an encoded data block follows.
      * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta getCellBlockMeta() { - return cellBlockMeta_; + return cellBlockMeta_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance() : cellBlockMeta_; } /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; - * *
      * If present, then an encoded data block follows.
      * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder getCellBlockMetaOrBuilder() { - return cellBlockMeta_; + return cellBlockMeta_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance() : cellBlockMeta_; } - // optional uint32 priority = 6; public static final int PRIORITY_FIELD_NUMBER = 6; private int priority_; /** - * optional uint32 priority = 6; - * *
      * 0 is NORMAL priority.  200 is HIGH.  If no priority, treat it as NORMAL.
      * See HConstants.
      * 
+ * + * optional uint32 priority = 6; */ public boolean hasPriority() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** - * optional uint32 priority = 6; - * *
      * 0 is NORMAL priority.  200 is HIGH.  If no priority, treat it as NORMAL.
      * See HConstants.
      * 
+ * + * optional uint32 priority = 6; */ public int getPriority() { return priority_; } - // optional uint32 timeout = 7; public static final int TIMEOUT_FIELD_NUMBER = 7; private int timeout_; /** @@ -4241,19 +4341,11 @@ public final class RPCProtos { return timeout_; } - private void initFields() { - callId_ = 0; - traceInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo.getDefaultInstance(); - methodName_ = ""; - requestParam_ = false; - cellBlockMeta_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance(); - priority_ = 0; - timeout_ = 0; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -4261,21 +4353,20 @@ public final class RPCProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt32(1, callId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, traceInfo_); + output.writeMessage(2, getTraceInfo()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, getMethodNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, methodName_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeBool(4, requestParam_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeMessage(5, cellBlockMeta_); + output.writeMessage(5, getCellBlockMeta()); } if (((bitField0_ & 0x00000020) == 0x00000020)) { output.writeUInt32(6, priority_); @@ -4283,12 +4374,11 @@ public final class RPCProtos { if (((bitField0_ & 0x00000040) == 0x00000040)) { output.writeUInt32(7, timeout_); } - getUnknownFields().writeTo(output); + 
unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -4298,11 +4388,10 @@ public final class RPCProtos { } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, traceInfo_); + .computeMessageSize(2, getTraceInfo()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, getMethodNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, methodName_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream @@ -4310,7 +4399,7 @@ public final class RPCProtos { } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(5, cellBlockMeta_); + .computeMessageSize(5, getCellBlockMeta()); } if (((bitField0_ & 0x00000020) == 0x00000020)) { size += com.google.protobuf.CodedOutputStream @@ -4320,19 +4409,13 @@ public final class RPCProtos { size += com.google.protobuf.CodedOutputStream .computeUInt32Size(7, timeout_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -4378,12 +4461,10 @@ public final class RPCProtos { result = result && (getTimeout() == other.getTimeout()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int 
memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -4405,7 +4486,8 @@ public final class RPCProtos { } if (hasRequestParam()) { hash = (37 * hash) + REQUEST_PARAM_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getRequestParam()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getRequestParam()); } if (hasCellBlockMeta()) { hash = (37 * hash) + CELL_BLOCK_META_FIELD_NUMBER; @@ -4419,7 +4501,7 @@ public final class RPCProtos { hash = (37 * hash) + TIMEOUT_FIELD_NUMBER; hash = (53 * hash) + getTimeout(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -4447,65 +4529,77 @@ public final class RPCProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return 
PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.RequestHeader} - * *
      * Header sent making a request.
      * 
+ * + * Protobuf type {@code hbase.pb.RequestHeader} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeaderOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RequestHeader) + org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeaderOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_RequestHeader_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_RequestHeader_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -4518,26 +4612,23 @@ public final class RPCProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTraceInfoFieldBuilder(); getCellBlockMetaFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); callId_ = 0; bitField0_ = (bitField0_ & ~0x00000001); if (traceInfoBuilder_ == null) { - traceInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo.getDefaultInstance(); + traceInfo_ = null; } else { traceInfoBuilder_.clear(); } @@ -4547,7 +4638,7 @@ public final class RPCProtos { requestParam_ = false; bitField0_ = (bitField0_ & ~0x00000008); if 
(cellBlockMetaBuilder_ == null) { - cellBlockMeta_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance(); + cellBlockMeta_ = null; } else { cellBlockMetaBuilder_.clear(); } @@ -4559,10 +4650,6 @@ public final class RPCProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_RequestHeader_descriptor; @@ -4625,6 +4712,32 @@ public final class RPCProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader)other); @@ -4659,7 +4772,8 @@ public final class RPCProtos { if (other.hasTimeout()) { setTimeout(other.getTimeout()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -4676,7 
+4790,7 @@ public final class RPCProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -4686,34 +4800,33 @@ public final class RPCProtos { } private int bitField0_; - // optional uint32 call_id = 1; private int callId_ ; /** - * optional uint32 call_id = 1; - * *
        * Monotonically increasing call_id to keep track of RPC requests and their response
        * 
+ * + * optional uint32 call_id = 1; */ public boolean hasCallId() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional uint32 call_id = 1; - * *
        * Monotonically increasing call_id to keep track of RPC requests and their response
        * 
+ * + * optional uint32 call_id = 1; */ public int getCallId() { return callId_; } /** - * optional uint32 call_id = 1; - * *
        * Monotonically increasing call_id to keep track of RPC requests and their response
        * 
+ * + * optional uint32 call_id = 1; */ public Builder setCallId(int value) { bitField0_ |= 0x00000001; @@ -4722,11 +4835,11 @@ public final class RPCProtos { return this; } /** - * optional uint32 call_id = 1; - * *
        * Monotonically increasing call_id to keep track of RPC requests and their response
        * 
+ * + * optional uint32 call_id = 1; */ public Builder clearCallId() { bitField0_ = (bitField0_ & ~0x00000001); @@ -4735,9 +4848,8 @@ public final class RPCProtos { return this; } - // optional .hbase.pb.RPCTInfo trace_info = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo traceInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo traceInfo_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfoOrBuilder> traceInfoBuilder_; /** * optional .hbase.pb.RPCTInfo trace_info = 2; @@ -4750,7 +4862,7 @@ public final class RPCProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo getTraceInfo() { if (traceInfoBuilder_ == null) { - return traceInfo_; + return traceInfo_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo.getDefaultInstance() : traceInfo_; } else { return traceInfoBuilder_.getMessage(); } @@ -4791,6 +4903,7 @@ public final class RPCProtos { public Builder mergeTraceInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo value) { if (traceInfoBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + traceInfo_ != null && traceInfo_ != org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo.getDefaultInstance()) { traceInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo.newBuilder(traceInfo_).mergeFrom(value).buildPartial(); @@ -4809,7 +4922,7 @@ public final class RPCProtos { */ public Builder clearTraceInfo() { if (traceInfoBuilder_ == null) { - traceInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo.getDefaultInstance(); + traceInfo_ = null; onChanged(); } else { traceInfoBuilder_.clear(); @@ -4832,19 +4945,20 @@ public final class RPCProtos { if (traceInfoBuilder_ != null) { return traceInfoBuilder_.getMessageOrBuilder(); } else { - return traceInfo_; + return traceInfo_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo.getDefaultInstance() : traceInfo_; } } /** * optional .hbase.pb.RPCTInfo trace_info = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfoOrBuilder> getTraceInfoFieldBuilder() { if (traceInfoBuilder_ == null) { - traceInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + traceInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfoOrBuilder>( - traceInfo_, + getTraceInfo(), getParentForChildren(), isClean()); traceInfo_ = null; @@ -4852,7 +4966,6 @@ public final class RPCProtos { return traceInfoBuilder_; } - // optional string method_name = 3; private java.lang.Object methodName_ = ""; /** * optional string method_name = 3; @@ -4866,9 +4979,12 @@ public final class RPCProtos { public java.lang.String getMethodName() { java.lang.Object ref = methodName_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - methodName_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + methodName_ = s; + } return s; } else { return (java.lang.String) ref; @@ -4926,34 +5042,33 @@ public final class RPCProtos { return this; } - // optional bool request_param = 4; private boolean requestParam_ ; /** - * optional bool request_param = 4; - * *
        * If true, then a pb Message param follows.
        * 
+ * + * optional bool request_param = 4; */ public boolean hasRequestParam() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** - * optional bool request_param = 4; - * *
        * If true, then a pb Message param follows.
        * 
+ * + * optional bool request_param = 4; */ public boolean getRequestParam() { return requestParam_; } /** - * optional bool request_param = 4; - * *
        * If true, then a pb Message param follows.
        * 
+ * + * optional bool request_param = 4; */ public Builder setRequestParam(boolean value) { bitField0_ |= 0x00000008; @@ -4962,11 +5077,11 @@ public final class RPCProtos { return this; } /** - * optional bool request_param = 4; - * *
        * If true, then a pb Message param follows.
        * 
+ * + * optional bool request_param = 4; */ public Builder clearRequestParam() { bitField0_ = (bitField0_ & ~0x00000008); @@ -4975,40 +5090,39 @@ public final class RPCProtos { return this; } - // optional .hbase.pb.CellBlockMeta cell_block_meta = 5; - private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta cellBlockMeta_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta cellBlockMeta_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder> cellBlockMetaBuilder_; /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; - * *
        * If present, then an encoded data block follows.
        * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; */ public boolean hasCellBlockMeta() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; - * *
        * If present, then an encoded data block follows.
        * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta getCellBlockMeta() { if (cellBlockMetaBuilder_ == null) { - return cellBlockMeta_; + return cellBlockMeta_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance() : cellBlockMeta_; } else { return cellBlockMetaBuilder_.getMessage(); } } /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; - * *
        * If present, then an encoded data block follows.
        * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; */ public Builder setCellBlockMeta(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta value) { if (cellBlockMetaBuilder_ == null) { @@ -5024,11 +5138,11 @@ public final class RPCProtos { return this; } /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; - * *
        * If present, then an encoded data block follows.
        * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; */ public Builder setCellBlockMeta( org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.Builder builderForValue) { @@ -5042,15 +5156,16 @@ public final class RPCProtos { return this; } /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; - * *
        * If present, then an encoded data block follows.
        * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; */ public Builder mergeCellBlockMeta(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta value) { if (cellBlockMetaBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010) && + cellBlockMeta_ != null && cellBlockMeta_ != org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance()) { cellBlockMeta_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.newBuilder(cellBlockMeta_).mergeFrom(value).buildPartial(); @@ -5065,15 +5180,15 @@ public final class RPCProtos { return this; } /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; - * *
        * If present, then an encoded data block follows.
        * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; */ public Builder clearCellBlockMeta() { if (cellBlockMetaBuilder_ == null) { - cellBlockMeta_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance(); + cellBlockMeta_ = null; onChanged(); } else { cellBlockMetaBuilder_.clear(); @@ -5082,11 +5197,11 @@ public final class RPCProtos { return this; } /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; - * *
        * If present, then an encoded data block follows.
        * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.Builder getCellBlockMetaBuilder() { bitField0_ |= 0x00000010; @@ -5094,33 +5209,34 @@ public final class RPCProtos { return getCellBlockMetaFieldBuilder().getBuilder(); } /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; - * *
        * If present, then an encoded data block follows.
        * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder getCellBlockMetaOrBuilder() { if (cellBlockMetaBuilder_ != null) { return cellBlockMetaBuilder_.getMessageOrBuilder(); } else { - return cellBlockMeta_; + return cellBlockMeta_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance() : cellBlockMeta_; } } /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; - * *
        * If present, then an encoded data block follows.
        * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 5; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder> getCellBlockMetaFieldBuilder() { if (cellBlockMetaBuilder_ == null) { - cellBlockMetaBuilder_ = new com.google.protobuf.SingleFieldBuilder< + cellBlockMetaBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder>( - cellBlockMeta_, + getCellBlockMeta(), getParentForChildren(), isClean()); cellBlockMeta_ = null; @@ -5128,37 +5244,36 @@ public final class RPCProtos { return cellBlockMetaBuilder_; } - // optional uint32 priority = 6; private int priority_ ; /** - * optional uint32 priority = 6; - * *
        * 0 is NORMAL priority.  200 is HIGH.  If no priority, treat it as NORMAL.
        * See HConstants.
        * 
+ * + * optional uint32 priority = 6; */ public boolean hasPriority() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** - * optional uint32 priority = 6; - * *
        * 0 is NORMAL priority.  200 is HIGH.  If no priority, treat it as NORMAL.
        * See HConstants.
        * 
+ * + * optional uint32 priority = 6; */ public int getPriority() { return priority_; } /** - * optional uint32 priority = 6; - * *
        * 0 is NORMAL priority.  200 is HIGH.  If no priority, treat it as NORMAL.
        * See HConstants.
        * 
+ * + * optional uint32 priority = 6; */ public Builder setPriority(int value) { bitField0_ |= 0x00000020; @@ -5167,12 +5282,12 @@ public final class RPCProtos { return this; } /** - * optional uint32 priority = 6; - * *
        * 0 is NORMAL priority.  200 is HIGH.  If no priority, treat it as NORMAL.
        * See HConstants.
        * 
+ * + * optional uint32 priority = 6; */ public Builder clearPriority() { bitField0_ = (bitField0_ & ~0x00000020); @@ -5181,7 +5296,6 @@ public final class RPCProtos { return this; } - // optional uint32 timeout = 7; private int timeout_ ; /** * optional uint32 timeout = 7; @@ -5213,22 +5327,59 @@ public final class RPCProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RequestHeader) } + // @@protoc_insertion_point(class_scope:hbase.pb.RequestHeader) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader DEFAULT_INSTANCE; static { - defaultInstance = new RequestHeader(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RequestHeader parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RequestHeader(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // 
@@protoc_insertion_point(class_scope:hbase.pb.RequestHeader) } - public interface ResponseHeaderOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ResponseHeaderOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ResponseHeader) + com.google.protobuf.MessageOrBuilder { - // optional uint32 call_id = 1; /** * optional uint32 call_id = 1; */ @@ -5238,91 +5389,81 @@ public final class RPCProtos { */ int getCallId(); - // optional .hbase.pb.ExceptionResponse exception = 2; /** - * optional .hbase.pb.ExceptionResponse exception = 2; - * *
      * If present, then request threw an exception and no response message (else we presume one)
      * 
+ * + * optional .hbase.pb.ExceptionResponse exception = 2; */ boolean hasException(); /** - * optional .hbase.pb.ExceptionResponse exception = 2; - * *
      * If present, then request threw an exception and no response message (else we presume one)
      * 
+ * + * optional .hbase.pb.ExceptionResponse exception = 2; */ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse getException(); /** - * optional .hbase.pb.ExceptionResponse exception = 2; - * *
      * If present, then request threw an exception and no response message (else we presume one)
      * 
+ * + * optional .hbase.pb.ExceptionResponse exception = 2; */ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder getExceptionOrBuilder(); - // optional .hbase.pb.CellBlockMeta cell_block_meta = 3; /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; - * *
      * If present, then an encoded data block follows.
      * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; */ boolean hasCellBlockMeta(); /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; - * *
      * If present, then an encoded data block follows.
      * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; */ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta getCellBlockMeta(); /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; - * *
      * If present, then an encoded data block follows.
      * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; */ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder getCellBlockMetaOrBuilder(); } /** * Protobuf type {@code hbase.pb.ResponseHeader} */ - public static final class ResponseHeader extends - com.google.protobuf.GeneratedMessage - implements ResponseHeaderOrBuilder { + public static final class ResponseHeader extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ResponseHeader) + ResponseHeaderOrBuilder { // Use ResponseHeader.newBuilder() to construct. - private ResponseHeader(com.google.protobuf.GeneratedMessage.Builder builder) { + private ResponseHeader(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ResponseHeader(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ResponseHeader defaultInstance; - public static ResponseHeader getDefaultInstance() { - return defaultInstance; } - - public ResponseHeader getDefaultInstanceForType() { - return defaultInstance; + private ResponseHeader() { + callId_ = 0; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ResponseHeader( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -5378,7 +5519,7 @@ public final class RPCProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - 
e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -5389,30 +5530,14 @@ public final class RPCProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ResponseHeader_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ResponseHeader_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ResponseHeader parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ResponseHeader(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional uint32 call_id = 1; public static final int CALL_ID_FIELD_NUMBER = 1; private int callId_; /** @@ -5428,83 +5553,77 @@ public final class RPCProtos { return callId_; } - // optional .hbase.pb.ExceptionResponse exception = 2; public static final int EXCEPTION_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse exception_; /** - * optional .hbase.pb.ExceptionResponse exception = 2; - * *
      * If present, then request threw an exception and no response message (else we presume one)
      * 
+ * + * optional .hbase.pb.ExceptionResponse exception = 2; */ public boolean hasException() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional .hbase.pb.ExceptionResponse exception = 2; - * *
      * If present, then request threw an exception and no response message (else we presume one)
      * 
+ * + * optional .hbase.pb.ExceptionResponse exception = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse getException() { - return exception_; + return exception_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance() : exception_; } /** - * optional .hbase.pb.ExceptionResponse exception = 2; - * *
      * If present, then request threw an exception and no response message (else we presume one)
      * 
+ * + * optional .hbase.pb.ExceptionResponse exception = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder getExceptionOrBuilder() { - return exception_; + return exception_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance() : exception_; } - // optional .hbase.pb.CellBlockMeta cell_block_meta = 3; public static final int CELL_BLOCK_META_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta cellBlockMeta_; /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; - * *
      * If present, then an encoded data block follows.
      * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; */ public boolean hasCellBlockMeta() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; - * *
      * If present, then an encoded data block follows.
      * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta getCellBlockMeta() { - return cellBlockMeta_; + return cellBlockMeta_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance() : cellBlockMeta_; } /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; - * *
      * If present, then an encoded data block follows.
      * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder getCellBlockMetaOrBuilder() { - return cellBlockMeta_; + return cellBlockMeta_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance() : cellBlockMeta_; } - private void initFields() { - callId_ = 0; - exception_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance(); - cellBlockMeta_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -5512,22 +5631,20 @@ public final class RPCProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt32(1, callId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, exception_); + output.writeMessage(2, getException()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeMessage(3, cellBlockMeta_); + output.writeMessage(3, getCellBlockMeta()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -5537,25 +5654,19 @@ public final class RPCProtos { } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, exception_); + .computeMessageSize(2, getException()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { 
size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, cellBlockMeta_); + .computeMessageSize(3, getCellBlockMeta()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -5581,12 +5692,10 @@ public final class RPCProtos { result = result && getCellBlockMeta() .equals(other.getCellBlockMeta()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -5606,7 +5715,7 @@ public final class RPCProtos { hash = (37 * hash) + CELL_BLOCK_META_FIELD_NUMBER; hash = (53 * hash) + getCellBlockMeta().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -5634,46 +5743,57 @@ public final class RPCProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, 
extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -5681,14 +5801,15 @@ public final class RPCProtos { * Protobuf type {@code hbase.pb.ResponseHeader} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeaderOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ResponseHeader) + org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeaderOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ResponseHeader_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ResponseHeader_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -5701,32 +5822,29 @@ public final class RPCProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getExceptionFieldBuilder(); getCellBlockMetaFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { 
super.clear(); callId_ = 0; bitField0_ = (bitField0_ & ~0x00000001); if (exceptionBuilder_ == null) { - exception_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance(); + exception_ = null; } else { exceptionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); if (cellBlockMetaBuilder_ == null) { - cellBlockMeta_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance(); + cellBlockMeta_ = null; } else { cellBlockMetaBuilder_.clear(); } @@ -5734,10 +5852,6 @@ public final class RPCProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ResponseHeader_descriptor; @@ -5784,6 +5898,32 @@ public final class RPCProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader) { return 
mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader)other); @@ -5804,7 +5944,8 @@ public final class RPCProtos { if (other.hasCellBlockMeta()) { mergeCellBlockMeta(other.getCellBlockMeta()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -5821,7 +5962,7 @@ public final class RPCProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -5831,7 +5972,6 @@ public final class RPCProtos { } private int bitField0_; - // optional uint32 call_id = 1; private int callId_ ; /** * optional uint32 call_id = 1; @@ -5864,40 +6004,39 @@ public final class RPCProtos { return this; } - // optional .hbase.pb.ExceptionResponse exception = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse exception_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse exception_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder> exceptionBuilder_; /** - * optional .hbase.pb.ExceptionResponse exception = 2; - * *
        * If present, then request threw an exception and no response message (else we presume one)
        * 
+ * + * optional .hbase.pb.ExceptionResponse exception = 2; */ public boolean hasException() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional .hbase.pb.ExceptionResponse exception = 2; - * *
        * If present, then request threw an exception and no response message (else we presume one)
        * 
+ * + * optional .hbase.pb.ExceptionResponse exception = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse getException() { if (exceptionBuilder_ == null) { - return exception_; + return exception_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance() : exception_; } else { return exceptionBuilder_.getMessage(); } } /** - * optional .hbase.pb.ExceptionResponse exception = 2; - * *
        * If present, then request threw an exception and no response message (else we presume one)
        * 
+ * + * optional .hbase.pb.ExceptionResponse exception = 2; */ public Builder setException(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse value) { if (exceptionBuilder_ == null) { @@ -5913,11 +6052,11 @@ public final class RPCProtos { return this; } /** - * optional .hbase.pb.ExceptionResponse exception = 2; - * *
        * If present, then request threw an exception and no response message (else we presume one)
        * 
+ * + * optional .hbase.pb.ExceptionResponse exception = 2; */ public Builder setException( org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.Builder builderForValue) { @@ -5931,15 +6070,16 @@ public final class RPCProtos { return this; } /** - * optional .hbase.pb.ExceptionResponse exception = 2; - * *
        * If present, then request threw an exception and no response message (else we presume one)
        * 
+ * + * optional .hbase.pb.ExceptionResponse exception = 2; */ public Builder mergeException(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse value) { if (exceptionBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + exception_ != null && exception_ != org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance()) { exception_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.newBuilder(exception_).mergeFrom(value).buildPartial(); @@ -5954,15 +6094,15 @@ public final class RPCProtos { return this; } /** - * optional .hbase.pb.ExceptionResponse exception = 2; - * *
        * If present, then request threw an exception and no response message (else we presume one)
        * 
+ * + * optional .hbase.pb.ExceptionResponse exception = 2; */ public Builder clearException() { if (exceptionBuilder_ == null) { - exception_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance(); + exception_ = null; onChanged(); } else { exceptionBuilder_.clear(); @@ -5971,11 +6111,11 @@ public final class RPCProtos { return this; } /** - * optional .hbase.pb.ExceptionResponse exception = 2; - * *
        * If present, then request threw an exception and no response message (else we presume one)
        * 
+ * + * optional .hbase.pb.ExceptionResponse exception = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.Builder getExceptionBuilder() { bitField0_ |= 0x00000002; @@ -5983,33 +6123,34 @@ public final class RPCProtos { return getExceptionFieldBuilder().getBuilder(); } /** - * optional .hbase.pb.ExceptionResponse exception = 2; - * *
        * If present, then request threw an exception and no response message (else we presume one)
        * 
+ * + * optional .hbase.pb.ExceptionResponse exception = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder getExceptionOrBuilder() { if (exceptionBuilder_ != null) { return exceptionBuilder_.getMessageOrBuilder(); } else { - return exception_; + return exception_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance() : exception_; } } /** - * optional .hbase.pb.ExceptionResponse exception = 2; - * *
        * If present, then request threw an exception and no response message (else we presume one)
        * 
+ * + * optional .hbase.pb.ExceptionResponse exception = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder> getExceptionFieldBuilder() { if (exceptionBuilder_ == null) { - exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder>( - exception_, + getException(), getParentForChildren(), isClean()); exception_ = null; @@ -6017,40 +6158,39 @@ public final class RPCProtos { return exceptionBuilder_; } - // optional .hbase.pb.CellBlockMeta cell_block_meta = 3; - private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta cellBlockMeta_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta cellBlockMeta_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder> cellBlockMetaBuilder_; /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; - * *
        * If present, then an encoded data block follows.
        * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; */ public boolean hasCellBlockMeta() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; - * *
        * If present, then an encoded data block follows.
        * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta getCellBlockMeta() { if (cellBlockMetaBuilder_ == null) { - return cellBlockMeta_; + return cellBlockMeta_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance() : cellBlockMeta_; } else { return cellBlockMetaBuilder_.getMessage(); } } /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; - * *
        * If present, then an encoded data block follows.
        * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; */ public Builder setCellBlockMeta(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta value) { if (cellBlockMetaBuilder_ == null) { @@ -6066,11 +6206,11 @@ public final class RPCProtos { return this; } /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; - * *
        * If present, then an encoded data block follows.
        * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; */ public Builder setCellBlockMeta( org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.Builder builderForValue) { @@ -6084,15 +6224,16 @@ public final class RPCProtos { return this; } /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; - * *
        * If present, then an encoded data block follows.
        * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; */ public Builder mergeCellBlockMeta(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta value) { if (cellBlockMetaBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004) && + cellBlockMeta_ != null && cellBlockMeta_ != org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance()) { cellBlockMeta_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.newBuilder(cellBlockMeta_).mergeFrom(value).buildPartial(); @@ -6107,15 +6248,15 @@ public final class RPCProtos { return this; } /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; - * *
        * If present, then an encoded data block follows.
        * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; */ public Builder clearCellBlockMeta() { if (cellBlockMetaBuilder_ == null) { - cellBlockMeta_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance(); + cellBlockMeta_ = null; onChanged(); } else { cellBlockMetaBuilder_.clear(); @@ -6124,11 +6265,11 @@ public final class RPCProtos { return this; } /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; - * *
        * If present, then an encoded data block follows.
        * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.Builder getCellBlockMetaBuilder() { bitField0_ |= 0x00000004; @@ -6136,87 +6277,125 @@ public final class RPCProtos { return getCellBlockMetaFieldBuilder().getBuilder(); } /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; - * *
        * If present, then an encoded data block follows.
        * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder getCellBlockMetaOrBuilder() { if (cellBlockMetaBuilder_ != null) { return cellBlockMetaBuilder_.getMessageOrBuilder(); } else { - return cellBlockMeta_; + return cellBlockMeta_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance() : cellBlockMeta_; } } /** - * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; - * *
        * If present, then an encoded data block follows.
        * 
+ * + * optional .hbase.pb.CellBlockMeta cell_block_meta = 3; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder> getCellBlockMetaFieldBuilder() { if (cellBlockMetaBuilder_ == null) { - cellBlockMetaBuilder_ = new com.google.protobuf.SingleFieldBuilder< + cellBlockMetaBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder>( - cellBlockMeta_, + getCellBlockMeta(), getParentForChildren(), isClean()); cellBlockMeta_ = null; } return cellBlockMetaBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ResponseHeader) } + // @@protoc_insertion_point(class_scope:hbase.pb.ResponseHeader) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader DEFAULT_INSTANCE; static { - defaultInstance = new ResponseHeader(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new 
com.google.protobuf.AbstractParser() { + public ResponseHeader parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ResponseHeader(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ResponseHeader) } - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_UserInformation_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_UserInformation_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ConnectionHeader_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ConnectionHeader_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_CellBlockMeta_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_CellBlockMeta_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final 
com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ExceptionResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ExceptionResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RequestHeader_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RequestHeader_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ResponseHeader_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ResponseHeader_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { @@ -6244,55 +6423,57 @@ public final class RPCProtos { "haded.protobuf.generatedB\tRPCProtosH\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_hbase_pb_UserInformation_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_hbase_pb_UserInformation_fieldAccessorTable = new - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_UserInformation_descriptor, - new java.lang.String[] { "EffectiveUser", "RealUser", }); - internal_static_hbase_pb_ConnectionHeader_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_hbase_pb_ConnectionHeader_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ConnectionHeader_descriptor, - new java.lang.String[] { "UserInfo", "ServiceName", "CellBlockCodecClass", "CellBlockCompressorClass", "VersionInfo", }); - internal_static_hbase_pb_CellBlockMeta_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_hbase_pb_CellBlockMeta_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_CellBlockMeta_descriptor, - new java.lang.String[] { "Length", }); - internal_static_hbase_pb_ExceptionResponse_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_hbase_pb_ExceptionResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ExceptionResponse_descriptor, - new java.lang.String[] { "ExceptionClassName", "StackTrace", "Hostname", "Port", "DoNotRetry", }); - internal_static_hbase_pb_RequestHeader_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_hbase_pb_RequestHeader_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RequestHeader_descriptor, - new java.lang.String[] { "CallId", "TraceInfo", "MethodName", "RequestParam", "CellBlockMeta", "Priority", "Timeout", }); - internal_static_hbase_pb_ResponseHeader_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_hbase_pb_ResponseHeader_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ResponseHeader_descriptor, - new java.lang.String[] { 
"CallId", "Exception", "CellBlockMeta", }); - return null; - } - }; + new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.getDescriptor(), org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(), }, assigner); + internal_static_hbase_pb_UserInformation_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_hbase_pb_UserInformation_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_UserInformation_descriptor, + new java.lang.String[] { "EffectiveUser", "RealUser", }); + internal_static_hbase_pb_ConnectionHeader_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_hbase_pb_ConnectionHeader_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ConnectionHeader_descriptor, + new java.lang.String[] { "UserInfo", "ServiceName", "CellBlockCodecClass", "CellBlockCompressorClass", "VersionInfo", }); + internal_static_hbase_pb_CellBlockMeta_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_hbase_pb_CellBlockMeta_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_CellBlockMeta_descriptor, + new java.lang.String[] { "Length", }); + internal_static_hbase_pb_ExceptionResponse_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_hbase_pb_ExceptionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + 
internal_static_hbase_pb_ExceptionResponse_descriptor, + new java.lang.String[] { "ExceptionClassName", "StackTrace", "Hostname", "Port", "DoNotRetry", }); + internal_static_hbase_pb_RequestHeader_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_hbase_pb_RequestHeader_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_RequestHeader_descriptor, + new java.lang.String[] { "CallId", "TraceInfo", "MethodName", "RequestParam", "CellBlockMeta", "Priority", "Timeout", }); + internal_static_hbase_pb_ResponseHeader_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_hbase_pb_ResponseHeader_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ResponseHeader_descriptor, + new java.lang.String[] { "CallId", "Exception", "CellBlockMeta", }); + org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.getDescriptor(); + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionNormalizerProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionNormalizerProtos.java index fc6555a..66b6990 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionNormalizerProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionNormalizerProtos.java @@ -6,12 +6,18 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated; public final class RegionNormalizerProtos { private RegionNormalizerProtos() {} public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { + 
registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); } - public interface RegionNormalizerStateOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RegionNormalizerStateOrBuilder extends + // @@protoc_insertion_point(interface_extends:RegionNormalizerState) + com.google.protobuf.MessageOrBuilder { - // optional bool normalizer_on = 1; /** * optional bool normalizer_on = 1; */ @@ -24,36 +30,28 @@ public final class RegionNormalizerProtos { /** * Protobuf type {@code RegionNormalizerState} */ - public static final class RegionNormalizerState extends - com.google.protobuf.GeneratedMessage - implements RegionNormalizerStateOrBuilder { + public static final class RegionNormalizerState extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:RegionNormalizerState) + RegionNormalizerStateOrBuilder { // Use RegionNormalizerState.newBuilder() to construct. - private RegionNormalizerState(com.google.protobuf.GeneratedMessage.Builder builder) { + private RegionNormalizerState(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private RegionNormalizerState(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RegionNormalizerState defaultInstance; - public static RegionNormalizerState getDefaultInstance() { - return defaultInstance; } - - public RegionNormalizerState getDefaultInstanceForType() { - return defaultInstance; + private RegionNormalizerState() { + normalizerOn_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RegionNormalizerState( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -83,7 +81,7 @@ public final class RegionNormalizerProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -94,30 +92,14 @@ public final class RegionNormalizerProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.internal_static_RegionNormalizerState_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.internal_static_RegionNormalizerState_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.RegionNormalizerState.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.RegionNormalizerState.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RegionNormalizerState parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RegionNormalizerState(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional bool normalizer_on = 1; public static final int NORMALIZER_ON_FIELD_NUMBER = 1; private boolean normalizerOn_; /** @@ -133,13 +115,11 
@@ public final class RegionNormalizerProtos { return normalizerOn_; } - private void initFields() { - normalizerOn_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -147,16 +127,14 @@ public final class RegionNormalizerProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, normalizerOn_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -164,19 +142,13 @@ public final class RegionNormalizerProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, normalizerOn_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -192,12 +164,10 @@ public final class RegionNormalizerProtos { result = result && (getNormalizerOn() == other.getNormalizerOn()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -207,9 +177,10 @@ public final 
class RegionNormalizerProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasNormalizerOn()) { hash = (37 * hash) + NORMALIZER_ON_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getNormalizerOn()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getNormalizerOn()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -237,46 +208,57 @@ public final class RegionNormalizerProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.RegionNormalizerState parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.RegionNormalizerState parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.RegionNormalizerState parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.RegionNormalizerState parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.RegionNormalizerState parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.RegionNormalizerState parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.RegionNormalizerState prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -284,14 +266,15 @@ public final class RegionNormalizerProtos { * Protobuf type {@code RegionNormalizerState} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.RegionNormalizerStateOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:RegionNormalizerState) + org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.RegionNormalizerStateOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.internal_static_RegionNormalizerState_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.internal_static_RegionNormalizerState_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -304,18 +287,15 @@ public final class RegionNormalizerProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - 
public Builder clear() { super.clear(); normalizerOn_ = false; @@ -323,10 +303,6 @@ public final class RegionNormalizerProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.internal_static_RegionNormalizerState_descriptor; @@ -357,6 +333,32 @@ public final class RegionNormalizerProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.RegionNormalizerState) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.RegionNormalizerState)other); @@ -371,7 +373,8 @@ public final class RegionNormalizerProtos { if (other.hasNormalizerOn()) { setNormalizerOn(other.getNormalizerOn()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -388,7 +391,7 @@ public final class RegionNormalizerProtos { 
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.RegionNormalizerState) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -398,7 +401,6 @@ public final class RegionNormalizerProtos { } private int bitField0_; - // optional bool normalizer_on = 1; private boolean normalizerOn_ ; /** * optional bool normalizer_on = 1; @@ -430,29 +432,66 @@ public final class RegionNormalizerProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:RegionNormalizerState) } + // @@protoc_insertion_point(class_scope:RegionNormalizerState) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.RegionNormalizerState DEFAULT_INSTANCE; static { - defaultInstance = new RegionNormalizerState(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.RegionNormalizerState(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.RegionNormalizerState getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RegionNormalizerState parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + 
return new RegionNormalizerState(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.RegionNormalizerState getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:RegionNormalizerState) } - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_RegionNormalizerState_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_RegionNormalizerState_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { @@ -462,23 +501,23 @@ public final class RegionNormalizerProtos { "ratedB\026RegionNormalizerProtosH\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_RegionNormalizerState_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_RegionNormalizerState_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RegionNormalizerState_descriptor, - new java.lang.String[] { "NormalizerOn", }); - return null; - } - }; + new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); + internal_static_RegionNormalizerState_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_RegionNormalizerState_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_RegionNormalizerState_descriptor, + new java.lang.String[] { "NormalizerOn", }); } // @@protoc_insertion_point(outer_class_scope) diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionServerStatusProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionServerStatusProtos.java index 63553af..540df5a 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionServerStatusProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionServerStatusProtos.java @@ -6,88 +6,91 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated; public final class RegionServerStatusProtos { private RegionServerStatusProtos() {} public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); } - public interface RegionServerStartupRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RegionServerStartupRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RegionServerStartupRequest) + com.google.protobuf.MessageOrBuilder { - // 
required uint32 port = 1; /** - * required uint32 port = 1; - * *
      ** Port number this regionserver is up on 
      * 
+ * + * required uint32 port = 1; */ boolean hasPort(); /** - * required uint32 port = 1; - * *
      ** Port number this regionserver is up on 
      * 
+ * + * required uint32 port = 1; */ int getPort(); - // required uint64 server_start_code = 2; /** - * required uint64 server_start_code = 2; - * *
      ** This servers' startcode 
      * 
+ * + * required uint64 server_start_code = 2; */ boolean hasServerStartCode(); /** - * required uint64 server_start_code = 2; - * *
      ** This servers' startcode 
      * 
+ * + * required uint64 server_start_code = 2; */ long getServerStartCode(); - // required uint64 server_current_time = 3; /** - * required uint64 server_current_time = 3; - * *
      ** Current time of the region server in ms 
      * 
+ * + * required uint64 server_current_time = 3; */ boolean hasServerCurrentTime(); /** - * required uint64 server_current_time = 3; - * *
      ** Current time of the region server in ms 
      * 
+ * + * required uint64 server_current_time = 3; */ long getServerCurrentTime(); - // optional string use_this_hostname_instead = 4; /** - * optional string use_this_hostname_instead = 4; - * *
      ** hostname for region server, optional 
      * 
+ * + * optional string use_this_hostname_instead = 4; */ boolean hasUseThisHostnameInstead(); /** - * optional string use_this_hostname_instead = 4; - * *
      ** hostname for region server, optional 
      * 
+ * + * optional string use_this_hostname_instead = 4; */ java.lang.String getUseThisHostnameInstead(); /** - * optional string use_this_hostname_instead = 4; - * *
      ** hostname for region server, optional 
      * 
+ * + * optional string use_this_hostname_instead = 4; */ com.google.protobuf.ByteString getUseThisHostnameInsteadBytes(); @@ -95,36 +98,31 @@ public final class RegionServerStatusProtos { /** * Protobuf type {@code hbase.pb.RegionServerStartupRequest} */ - public static final class RegionServerStartupRequest extends - com.google.protobuf.GeneratedMessage - implements RegionServerStartupRequestOrBuilder { + public static final class RegionServerStartupRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RegionServerStartupRequest) + RegionServerStartupRequestOrBuilder { // Use RegionServerStartupRequest.newBuilder() to construct. - private RegionServerStartupRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private RegionServerStartupRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private RegionServerStartupRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RegionServerStartupRequest defaultInstance; - public static RegionServerStartupRequest getDefaultInstance() { - return defaultInstance; } - - public RegionServerStartupRequest getDefaultInstanceForType() { - return defaultInstance; + private RegionServerStartupRequest() { + port_ = 0; + serverStartCode_ = 0L; + serverCurrentTime_ = 0L; + useThisHostnameInstead_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RegionServerStartupRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; 
com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -159,8 +157,9 @@ public final class RegionServerStatusProtos { break; } case 34: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000008; - useThisHostnameInstead_ = input.readBytes(); + useThisHostnameInstead_ = bs; break; } } @@ -169,7 +168,7 @@ public final class RegionServerStatusProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -180,120 +179,101 @@ public final class RegionServerStatusProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RegionServerStartupRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RegionServerStartupRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser 
getParserForType() { - return PARSER; - } - private int bitField0_; - // required uint32 port = 1; public static final int PORT_FIELD_NUMBER = 1; private int port_; /** - * required uint32 port = 1; - * *
      ** Port number this regionserver is up on 
      * 
+ * + * required uint32 port = 1; */ public boolean hasPort() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required uint32 port = 1; - * *
      ** Port number this regionserver is up on 
      * 
+ * + * required uint32 port = 1; */ public int getPort() { return port_; } - // required uint64 server_start_code = 2; public static final int SERVER_START_CODE_FIELD_NUMBER = 2; private long serverStartCode_; /** - * required uint64 server_start_code = 2; - * *
      ** This servers' startcode 
      * 
+ * + * required uint64 server_start_code = 2; */ public boolean hasServerStartCode() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * required uint64 server_start_code = 2; - * *
      ** This servers' startcode 
      * 
+ * + * required uint64 server_start_code = 2; */ public long getServerStartCode() { return serverStartCode_; } - // required uint64 server_current_time = 3; public static final int SERVER_CURRENT_TIME_FIELD_NUMBER = 3; private long serverCurrentTime_; /** - * required uint64 server_current_time = 3; - * *
      ** Current time of the region server in ms 
      * 
+ * + * required uint64 server_current_time = 3; */ public boolean hasServerCurrentTime() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * required uint64 server_current_time = 3; - * *
      ** Current time of the region server in ms 
      * 
+ * + * required uint64 server_current_time = 3; */ public long getServerCurrentTime() { return serverCurrentTime_; } - // optional string use_this_hostname_instead = 4; public static final int USE_THIS_HOSTNAME_INSTEAD_FIELD_NUMBER = 4; - private java.lang.Object useThisHostnameInstead_; + private volatile java.lang.Object useThisHostnameInstead_; /** - * optional string use_this_hostname_instead = 4; - * *
      ** hostname for region server, optional 
      * 
+ * + * optional string use_this_hostname_instead = 4; */ public boolean hasUseThisHostnameInstead() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** - * optional string use_this_hostname_instead = 4; - * *
      ** hostname for region server, optional 
      * 
+ * + * optional string use_this_hostname_instead = 4; */ public java.lang.String getUseThisHostnameInstead() { java.lang.Object ref = useThisHostnameInstead_; @@ -310,11 +290,11 @@ public final class RegionServerStatusProtos { } } /** - * optional string use_this_hostname_instead = 4; - * *
      ** hostname for region server, optional 
      * 
+ * + * optional string use_this_hostname_instead = 4; */ public com.google.protobuf.ByteString getUseThisHostnameInsteadBytes() { @@ -330,16 +310,11 @@ public final class RegionServerStatusProtos { } } - private void initFields() { - port_ = 0; - serverStartCode_ = 0L; - serverCurrentTime_ = 0L; - useThisHostnameInstead_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasPort()) { memoizedIsInitialized = 0; @@ -359,7 +334,6 @@ public final class RegionServerStatusProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt32(1, port_); } @@ -370,14 +344,13 @@ public final class RegionServerStatusProtos { output.writeUInt64(3, serverCurrentTime_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeBytes(4, getUseThisHostnameInsteadBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, useThisHostnameInstead_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -394,22 +367,15 @@ public final class RegionServerStatusProtos { .computeUInt64Size(3, serverCurrentTime_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(4, getUseThisHostnameInsteadBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, useThisHostnameInstead_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } 
private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -440,12 +406,10 @@ public final class RegionServerStatusProtos { result = result && getUseThisHostnameInstead() .equals(other.getUseThisHostnameInstead()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -459,17 +423,19 @@ public final class RegionServerStatusProtos { } if (hasServerStartCode()) { hash = (37 * hash) + SERVER_START_CODE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getServerStartCode()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getServerStartCode()); } if (hasServerCurrentTime()) { hash = (37 * hash) + SERVER_CURRENT_TIME_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getServerCurrentTime()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getServerCurrentTime()); } if (hasUseThisHostnameInstead()) { hash = (37 * hash) + USE_THIS_HOSTNAME_INSTEAD_FIELD_NUMBER; hash = (53 * hash) + getUseThisHostnameInstead().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -497,46 +463,57 @@ public final class RegionServerStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder 
newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -544,14 +521,15 @@ public final class RegionServerStatusProtos { * Protobuf type {@code hbase.pb.RegionServerStartupRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RegionServerStartupRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ 
-564,18 +542,15 @@ public final class RegionServerStatusProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); port_ = 0; @@ -589,10 +564,6 @@ public final class RegionServerStatusProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupRequest_descriptor; @@ -635,6 +606,32 @@ public final class RegionServerStatusProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest)other); @@ -660,21 +657,19 @@ public final class RegionServerStatusProtos { useThisHostnameInstead_ = other.useThisHostnameInstead_; onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasPort()) { - return false; } if (!hasServerStartCode()) { - return false; } if (!hasServerCurrentTime()) { - return false; } return true; @@ -689,7 +684,7 @@ public final class RegionServerStatusProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -699,34 +694,33 @@ public final class RegionServerStatusProtos { } private int bitField0_; - // required uint32 port = 1; private int port_ ; /** - * required uint32 port = 1; - * *
        ** Port number this regionserver is up on 
        * 
+ * + * required uint32 port = 1; */ public boolean hasPort() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required uint32 port = 1; - * *
        ** Port number this regionserver is up on 
        * 
+ * + * required uint32 port = 1; */ public int getPort() { return port_; } /** - * required uint32 port = 1; - * *
        ** Port number this regionserver is up on 
        * 
+ * + * required uint32 port = 1; */ public Builder setPort(int value) { bitField0_ |= 0x00000001; @@ -735,11 +729,11 @@ public final class RegionServerStatusProtos { return this; } /** - * required uint32 port = 1; - * *
        ** Port number this regionserver is up on 
        * 
+ * + * required uint32 port = 1; */ public Builder clearPort() { bitField0_ = (bitField0_ & ~0x00000001); @@ -748,34 +742,33 @@ public final class RegionServerStatusProtos { return this; } - // required uint64 server_start_code = 2; private long serverStartCode_ ; /** - * required uint64 server_start_code = 2; - * *
        ** This servers' startcode 
        * 
+ * + * required uint64 server_start_code = 2; */ public boolean hasServerStartCode() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * required uint64 server_start_code = 2; - * *
        ** This servers' startcode 
        * 
+ * + * required uint64 server_start_code = 2; */ public long getServerStartCode() { return serverStartCode_; } /** - * required uint64 server_start_code = 2; - * *
        ** This servers' startcode 
        * 
+ * + * required uint64 server_start_code = 2; */ public Builder setServerStartCode(long value) { bitField0_ |= 0x00000002; @@ -784,11 +777,11 @@ public final class RegionServerStatusProtos { return this; } /** - * required uint64 server_start_code = 2; - * *
        ** This servers' startcode 
        * 
+ * + * required uint64 server_start_code = 2; */ public Builder clearServerStartCode() { bitField0_ = (bitField0_ & ~0x00000002); @@ -797,34 +790,33 @@ public final class RegionServerStatusProtos { return this; } - // required uint64 server_current_time = 3; private long serverCurrentTime_ ; /** - * required uint64 server_current_time = 3; - * *
        ** Current time of the region server in ms 
        * 
+ * + * required uint64 server_current_time = 3; */ public boolean hasServerCurrentTime() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * required uint64 server_current_time = 3; - * *
        ** Current time of the region server in ms 
        * 
+ * + * required uint64 server_current_time = 3; */ public long getServerCurrentTime() { return serverCurrentTime_; } /** - * required uint64 server_current_time = 3; - * *
        ** Current time of the region server in ms 
        * 
+ * + * required uint64 server_current_time = 3; */ public Builder setServerCurrentTime(long value) { bitField0_ |= 0x00000004; @@ -833,11 +825,11 @@ public final class RegionServerStatusProtos { return this; } /** - * required uint64 server_current_time = 3; - * *
        ** Current time of the region server in ms 
        * 
+ * + * required uint64 server_current_time = 3; */ public Builder clearServerCurrentTime() { bitField0_ = (bitField0_ & ~0x00000004); @@ -846,42 +838,44 @@ public final class RegionServerStatusProtos { return this; } - // optional string use_this_hostname_instead = 4; private java.lang.Object useThisHostnameInstead_ = ""; /** - * optional string use_this_hostname_instead = 4; - * *
        ** hostname for region server, optional 
        * 
+ * + * optional string use_this_hostname_instead = 4; */ public boolean hasUseThisHostnameInstead() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** - * optional string use_this_hostname_instead = 4; - * *
        ** hostname for region server, optional 
        * 
+ * + * optional string use_this_hostname_instead = 4; */ public java.lang.String getUseThisHostnameInstead() { java.lang.Object ref = useThisHostnameInstead_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - useThisHostnameInstead_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + useThisHostnameInstead_ = s; + } return s; } else { return (java.lang.String) ref; } } /** - * optional string use_this_hostname_instead = 4; - * *
        ** hostname for region server, optional 
        * 
+ * + * optional string use_this_hostname_instead = 4; */ public com.google.protobuf.ByteString getUseThisHostnameInsteadBytes() { @@ -897,11 +891,11 @@ public final class RegionServerStatusProtos { } } /** - * optional string use_this_hostname_instead = 4; - * *
        ** hostname for region server, optional 
        * 
+ * + * optional string use_this_hostname_instead = 4; */ public Builder setUseThisHostnameInstead( java.lang.String value) { @@ -914,11 +908,11 @@ public final class RegionServerStatusProtos { return this; } /** - * optional string use_this_hostname_instead = 4; - * *
        ** hostname for region server, optional 
        * 
+ * + * optional string use_this_hostname_instead = 4; */ public Builder clearUseThisHostnameInstead() { bitField0_ = (bitField0_ & ~0x00000008); @@ -927,11 +921,11 @@ public final class RegionServerStatusProtos { return this; } /** - * optional string use_this_hostname_instead = 4; - * *
        ** hostname for region server, optional 
        * 
+ * + * optional string use_this_hostname_instead = 4; */ public Builder setUseThisHostnameInsteadBytes( com.google.protobuf.ByteString value) { @@ -943,77 +937,114 @@ public final class RegionServerStatusProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RegionServerStartupRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.RegionServerStartupRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest DEFAULT_INSTANCE; static { - defaultInstance = new RegionServerStartupRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RegionServerStartupRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionServerStartupRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest 
getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.RegionServerStartupRequest) } - public interface RegionServerStartupResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RegionServerStartupResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RegionServerStartupResponse) + com.google.protobuf.MessageOrBuilder { - // repeated .hbase.pb.NameStringPair map_entries = 1; /** - * repeated .hbase.pb.NameStringPair map_entries = 1; - * *
      **
      * Configuration for the regionserver to use: e.g. filesystem,
      * hbase rootdir, the hostname to use creating the RegionServer ServerName,
      * etc
      * 
+ * + * repeated .hbase.pb.NameStringPair map_entries = 1; */ java.util.List getMapEntriesList(); /** - * repeated .hbase.pb.NameStringPair map_entries = 1; - * *
      **
      * Configuration for the regionserver to use: e.g. filesystem,
      * hbase rootdir, the hostname to use creating the RegionServer ServerName,
      * etc
      * 
+ * + * repeated .hbase.pb.NameStringPair map_entries = 1; */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair getMapEntries(int index); /** - * repeated .hbase.pb.NameStringPair map_entries = 1; - * *
      **
      * Configuration for the regionserver to use: e.g. filesystem,
      * hbase rootdir, the hostname to use creating the RegionServer ServerName,
      * etc
      * 
+ * + * repeated .hbase.pb.NameStringPair map_entries = 1; */ int getMapEntriesCount(); /** - * repeated .hbase.pb.NameStringPair map_entries = 1; - * *
      **
      * Configuration for the regionserver to use: e.g. filesystem,
      * hbase rootdir, the hostname to use creating the RegionServer ServerName,
      * etc
      * 
+ * + * repeated .hbase.pb.NameStringPair map_entries = 1; */ java.util.List getMapEntriesOrBuilderList(); /** - * repeated .hbase.pb.NameStringPair map_entries = 1; - * *
      **
      * Configuration for the regionserver to use: e.g. filesystem,
      * hbase rootdir, the hostname to use creating the RegionServer ServerName,
      * etc
      * 
+ * + * repeated .hbase.pb.NameStringPair map_entries = 1; */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getMapEntriesOrBuilder( int index); @@ -1021,36 +1052,28 @@ public final class RegionServerStatusProtos { /** * Protobuf type {@code hbase.pb.RegionServerStartupResponse} */ - public static final class RegionServerStartupResponse extends - com.google.protobuf.GeneratedMessage - implements RegionServerStartupResponseOrBuilder { + public static final class RegionServerStartupResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RegionServerStartupResponse) + RegionServerStartupResponseOrBuilder { // Use RegionServerStartupResponse.newBuilder() to construct. - private RegionServerStartupResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private RegionServerStartupResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private RegionServerStartupResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RegionServerStartupResponse defaultInstance; - public static RegionServerStartupResponse getDefaultInstance() { - return defaultInstance; - } - - public RegionServerStartupResponse getDefaultInstanceForType() { - return defaultInstance; + private RegionServerStartupResponse() { + mapEntries_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RegionServerStartupResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 
0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -1074,7 +1097,8 @@ public final class RegionServerStatusProtos { mapEntries_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } - mapEntries_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry)); + mapEntries_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry)); break; } } @@ -1083,7 +1107,7 @@ public final class RegionServerStatusProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { mapEntries_ = java.util.Collections.unmodifiableList(mapEntries_); @@ -1097,106 +1121,88 @@ public final class RegionServerStatusProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RegionServerStartupResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RegionServerStartupResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - // repeated .hbase.pb.NameStringPair map_entries = 1; public static final int MAP_ENTRIES_FIELD_NUMBER = 1; private java.util.List mapEntries_; /** - * repeated .hbase.pb.NameStringPair map_entries = 1; - * *
      **
      * Configuration for the regionserver to use: e.g. filesystem,
      * hbase rootdir, the hostname to use creating the RegionServer ServerName,
      * etc
      * 
+ * + * repeated .hbase.pb.NameStringPair map_entries = 1; */ public java.util.List getMapEntriesList() { return mapEntries_; } /** - * repeated .hbase.pb.NameStringPair map_entries = 1; - * *
      **
      * Configuration for the regionserver to use: e.g. filesystem,
      * hbase rootdir, the hostname to use creating the RegionServer ServerName,
      * etc
      * 
+ * + * repeated .hbase.pb.NameStringPair map_entries = 1; */ public java.util.List getMapEntriesOrBuilderList() { return mapEntries_; } /** - * repeated .hbase.pb.NameStringPair map_entries = 1; - * *
      **
      * Configuration for the regionserver to use: e.g. filesystem,
      * hbase rootdir, the hostname to use creating the RegionServer ServerName,
      * etc
      * 
+ * + * repeated .hbase.pb.NameStringPair map_entries = 1; */ public int getMapEntriesCount() { return mapEntries_.size(); } /** - * repeated .hbase.pb.NameStringPair map_entries = 1; - * *
      **
      * Configuration for the regionserver to use: e.g. filesystem,
      * hbase rootdir, the hostname to use creating the RegionServer ServerName,
      * etc
      * 
+ * + * repeated .hbase.pb.NameStringPair map_entries = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair getMapEntries(int index) { return mapEntries_.get(index); } /** - * repeated .hbase.pb.NameStringPair map_entries = 1; - * *
      **
      * Configuration for the regionserver to use: e.g. filesystem,
      * hbase rootdir, the hostname to use creating the RegionServer ServerName,
      * etc
      * 
+ * + * repeated .hbase.pb.NameStringPair map_entries = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getMapEntriesOrBuilder( int index) { return mapEntries_.get(index); } - private void initFields() { - mapEntries_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; for (int i = 0; i < getMapEntriesCount(); i++) { if (!getMapEntries(i).isInitialized()) { @@ -1210,16 +1216,14 @@ public final class RegionServerStatusProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); for (int i = 0; i < mapEntries_.size(); i++) { output.writeMessage(1, mapEntries_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -1227,19 +1231,13 @@ public final class RegionServerStatusProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, mapEntries_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -1252,12 +1250,10 @@ public final class RegionServerStatusProtos { boolean result = true; result = result && getMapEntriesList() .equals(other.getMapEntriesList()); - result = 
result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -1269,7 +1265,7 @@ public final class RegionServerStatusProtos { hash = (37 * hash) + MAP_ENTRIES_FIELD_NUMBER; hash = (53 * hash) + getMapEntriesList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -1297,46 +1293,57 @@ public final class RegionServerStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, 
extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -1344,14 +1351,15 @@ public final class RegionServerStatusProtos { * Protobuf type {@code hbase.pb.RegionServerStartupResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RegionServerStartupResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -1364,19 +1372,16 @@ public final class RegionServerStatusProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + 
.alwaysUseFieldBuilders) { getMapEntriesFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (mapEntriesBuilder_ == null) { @@ -1388,10 +1393,6 @@ public final class RegionServerStatusProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupResponse_descriptor; @@ -1425,6 +1426,32 @@ public final class RegionServerStatusProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse)other); @@ -1455,21 +1482,21 @@ public final class RegionServerStatusProtos { mapEntries_ = other.mapEntries_; bitField0_ = (bitField0_ & ~0x00000001); 
mapEntriesBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getMapEntriesFieldBuilder() : null; } else { mapEntriesBuilder_.addAllMessages(other.mapEntries_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getMapEntriesCount(); i++) { if (!getMapEntries(i).isInitialized()) { - return false; } } @@ -1485,7 +1512,7 @@ public final class RegionServerStatusProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -1495,7 +1522,6 @@ public final class RegionServerStatusProtos { } private int bitField0_; - // repeated .hbase.pb.NameStringPair map_entries = 1; private java.util.List mapEntries_ = java.util.Collections.emptyList(); private void ensureMapEntriesIsMutable() { @@ -1505,18 +1531,18 @@ public final class RegionServerStatusProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> mapEntriesBuilder_; /** - * repeated .hbase.pb.NameStringPair map_entries = 1; - * *
        **
        * Configuration for the regionserver to use: e.g. filesystem,
        * hbase rootdir, the hostname to use creating the RegionServer ServerName,
        * etc
        * 
+ * + * repeated .hbase.pb.NameStringPair map_entries = 1; */ public java.util.List getMapEntriesList() { if (mapEntriesBuilder_ == null) { @@ -1526,14 +1552,14 @@ public final class RegionServerStatusProtos { } } /** - * repeated .hbase.pb.NameStringPair map_entries = 1; - * *
        **
        * Configuration for the regionserver to use: e.g. filesystem,
        * hbase rootdir, the hostname to use creating the RegionServer ServerName,
        * etc
        * 
+ * + * repeated .hbase.pb.NameStringPair map_entries = 1; */ public int getMapEntriesCount() { if (mapEntriesBuilder_ == null) { @@ -1543,14 +1569,14 @@ public final class RegionServerStatusProtos { } } /** - * repeated .hbase.pb.NameStringPair map_entries = 1; - * *
        **
        * Configuration for the regionserver to use: e.g. filesystem,
        * hbase rootdir, the hostname to use creating the RegionServer ServerName,
        * etc
        * 
+ * + * repeated .hbase.pb.NameStringPair map_entries = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair getMapEntries(int index) { if (mapEntriesBuilder_ == null) { @@ -1560,14 +1586,14 @@ public final class RegionServerStatusProtos { } } /** - * repeated .hbase.pb.NameStringPair map_entries = 1; - * *
        **
        * Configuration for the regionserver to use: e.g. filesystem,
        * hbase rootdir, the hostname to use creating the RegionServer ServerName,
        * etc
        * 
+ * + * repeated .hbase.pb.NameStringPair map_entries = 1; */ public Builder setMapEntries( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair value) { @@ -1584,14 +1610,14 @@ public final class RegionServerStatusProtos { return this; } /** - * repeated .hbase.pb.NameStringPair map_entries = 1; - * *
        **
        * Configuration for the regionserver to use: e.g. filesystem,
        * hbase rootdir, the hostname to use creating the RegionServer ServerName,
        * etc
        * 
+ * + * repeated .hbase.pb.NameStringPair map_entries = 1; */ public Builder setMapEntries( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { @@ -1605,14 +1631,14 @@ public final class RegionServerStatusProtos { return this; } /** - * repeated .hbase.pb.NameStringPair map_entries = 1; - * *
        **
        * Configuration for the regionserver to use: e.g. filesystem,
        * hbase rootdir, the hostname to use creating the RegionServer ServerName,
        * etc
        * 
+ * + * repeated .hbase.pb.NameStringPair map_entries = 1; */ public Builder addMapEntries(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair value) { if (mapEntriesBuilder_ == null) { @@ -1628,14 +1654,14 @@ public final class RegionServerStatusProtos { return this; } /** - * repeated .hbase.pb.NameStringPair map_entries = 1; - * *
        **
        * Configuration for the regionserver to use: e.g. filesystem,
        * hbase rootdir, the hostname to use creating the RegionServer ServerName,
        * etc
        * 
+ * + * repeated .hbase.pb.NameStringPair map_entries = 1; */ public Builder addMapEntries( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair value) { @@ -1652,14 +1678,14 @@ public final class RegionServerStatusProtos { return this; } /** - * repeated .hbase.pb.NameStringPair map_entries = 1; - * *
        **
        * Configuration for the regionserver to use: e.g. filesystem,
        * hbase rootdir, the hostname to use creating the RegionServer ServerName,
        * etc
        * 
+ * + * repeated .hbase.pb.NameStringPair map_entries = 1; */ public Builder addMapEntries( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { @@ -1673,14 +1699,14 @@ public final class RegionServerStatusProtos { return this; } /** - * repeated .hbase.pb.NameStringPair map_entries = 1; - * *
        **
        * Configuration for the regionserver to use: e.g. filesystem,
        * hbase rootdir, the hostname to use creating the RegionServer ServerName,
        * etc
        * 
+ * + * repeated .hbase.pb.NameStringPair map_entries = 1; */ public Builder addMapEntries( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { @@ -1694,20 +1720,21 @@ public final class RegionServerStatusProtos { return this; } /** - * repeated .hbase.pb.NameStringPair map_entries = 1; - * *
        **
        * Configuration for the regionserver to use: e.g. filesystem,
        * hbase rootdir, the hostname to use creating the RegionServer ServerName,
        * etc
        * 
+ * + * repeated .hbase.pb.NameStringPair map_entries = 1; */ public Builder addAllMapEntries( java.lang.Iterable values) { if (mapEntriesBuilder_ == null) { ensureMapEntriesIsMutable(); - super.addAll(values, mapEntries_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, mapEntries_); onChanged(); } else { mapEntriesBuilder_.addAllMessages(values); @@ -1715,14 +1742,14 @@ public final class RegionServerStatusProtos { return this; } /** - * repeated .hbase.pb.NameStringPair map_entries = 1; - * *
        **
        * Configuration for the regionserver to use: e.g. filesystem,
        * hbase rootdir, the hostname to use creating the RegionServer ServerName,
        * etc
        * 
+ * + * repeated .hbase.pb.NameStringPair map_entries = 1; */ public Builder clearMapEntries() { if (mapEntriesBuilder_ == null) { @@ -1735,14 +1762,14 @@ public final class RegionServerStatusProtos { return this; } /** - * repeated .hbase.pb.NameStringPair map_entries = 1; - * *
        **
        * Configuration for the regionserver to use: e.g. filesystem,
        * hbase rootdir, the hostname to use creating the RegionServer ServerName,
        * etc
        * 
+ * + * repeated .hbase.pb.NameStringPair map_entries = 1; */ public Builder removeMapEntries(int index) { if (mapEntriesBuilder_ == null) { @@ -1755,28 +1782,28 @@ public final class RegionServerStatusProtos { return this; } /** - * repeated .hbase.pb.NameStringPair map_entries = 1; - * *
        **
        * Configuration for the regionserver to use: e.g. filesystem,
        * hbase rootdir, the hostname to use creating the RegionServer ServerName,
        * etc
        * 
+ * + * repeated .hbase.pb.NameStringPair map_entries = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder getMapEntriesBuilder( int index) { return getMapEntriesFieldBuilder().getBuilder(index); } /** - * repeated .hbase.pb.NameStringPair map_entries = 1; - * *
        **
        * Configuration for the regionserver to use: e.g. filesystem,
        * hbase rootdir, the hostname to use creating the RegionServer ServerName,
        * etc
        * 
+ * + * repeated .hbase.pb.NameStringPair map_entries = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getMapEntriesOrBuilder( int index) { @@ -1786,14 +1813,14 @@ public final class RegionServerStatusProtos { } } /** - * repeated .hbase.pb.NameStringPair map_entries = 1; - * *
        **
        * Configuration for the regionserver to use: e.g. filesystem,
        * hbase rootdir, the hostname to use creating the RegionServer ServerName,
        * etc
        * 
+ * + * repeated .hbase.pb.NameStringPair map_entries = 1; */ public java.util.List getMapEntriesOrBuilderList() { @@ -1804,28 +1831,28 @@ public final class RegionServerStatusProtos { } } /** - * repeated .hbase.pb.NameStringPair map_entries = 1; - * *
        **
        * Configuration for the regionserver to use: e.g. filesystem,
        * hbase rootdir, the hostname to use creating the RegionServer ServerName,
        * etc
        * 
+ * + * repeated .hbase.pb.NameStringPair map_entries = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder addMapEntriesBuilder() { return getMapEntriesFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); } /** - * repeated .hbase.pb.NameStringPair map_entries = 1; - * *
        **
        * Configuration for the regionserver to use: e.g. filesystem,
        * hbase rootdir, the hostname to use creating the RegionServer ServerName,
        * etc
        * 
+ * + * repeated .hbase.pb.NameStringPair map_entries = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder addMapEntriesBuilder( int index) { @@ -1833,24 +1860,24 @@ public final class RegionServerStatusProtos { index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); } /** - * repeated .hbase.pb.NameStringPair map_entries = 1; - * *
        **
        * Configuration for the regionserver to use: e.g. filesystem,
        * hbase rootdir, the hostname to use creating the RegionServer ServerName,
        * etc
        * 
+ * + * repeated .hbase.pb.NameStringPair map_entries = 1; */ public java.util.List getMapEntriesBuilderList() { return getMapEntriesFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getMapEntriesFieldBuilder() { if (mapEntriesBuilder_ == null) { - mapEntriesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + mapEntriesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>( mapEntries_, ((bitField0_ & 0x00000001) == 0x00000001), @@ -1860,22 +1887,59 @@ public final class RegionServerStatusProtos { } return mapEntriesBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RegionServerStartupResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.RegionServerStartupResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse DEFAULT_INSTANCE; static { - defaultInstance = new RegionServerStartupResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse(); + } + + 
public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RegionServerStartupResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionServerStartupResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.RegionServerStartupResponse) } - public interface RegionServerReportRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RegionServerReportRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RegionServerReportRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.ServerName server = 1; /** * required .hbase.pb.ServerName server = 1; */ @@ -1889,65 +1953,55 @@ public final class RegionServerStatusProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder(); - // optional .hbase.pb.ServerLoad load = 2; /** - * optional .hbase.pb.ServerLoad load = 2; - * *
      ** load the server is under 
      * 
+ * + * optional .hbase.pb.ServerLoad load = 2; */ boolean hasLoad(); /** - * optional .hbase.pb.ServerLoad load = 2; - * *
      ** load the server is under 
      * 
+ * + * optional .hbase.pb.ServerLoad load = 2; */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad getLoad(); /** - * optional .hbase.pb.ServerLoad load = 2; - * *
      ** load the server is under 
      * 
+ * + * optional .hbase.pb.ServerLoad load = 2; */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoadOrBuilder getLoadOrBuilder(); } /** * Protobuf type {@code hbase.pb.RegionServerReportRequest} */ - public static final class RegionServerReportRequest extends - com.google.protobuf.GeneratedMessage - implements RegionServerReportRequestOrBuilder { + public static final class RegionServerReportRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RegionServerReportRequest) + RegionServerReportRequestOrBuilder { // Use RegionServerReportRequest.newBuilder() to construct. - private RegionServerReportRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private RegionServerReportRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private RegionServerReportRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RegionServerReportRequest defaultInstance; - public static RegionServerReportRequest getDefaultInstance() { - return defaultInstance; } - - public RegionServerReportRequest getDefaultInstanceForType() { - return defaultInstance; + private RegionServerReportRequest() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RegionServerReportRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -1998,7 +2052,7 @@ public final class 
RegionServerStatusProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -2009,30 +2063,14 @@ public final class RegionServerStatusProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerReportRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerReportRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RegionServerReportRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RegionServerReportRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.ServerName server = 1; public static final int SERVER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName server_; /** @@ -2045,57 +2083,53 @@ public final class RegionServerStatusProtos { * required .hbase.pb.ServerName server = 1; */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServer() { - return server_; + return server_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } /** * required .hbase.pb.ServerName server = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { - return server_; + return server_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } - // optional .hbase.pb.ServerLoad load = 2; public static final int LOAD_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad load_; /** - * optional .hbase.pb.ServerLoad load = 2; - * *
      ** load the server is under 
      * 
+ * + * optional .hbase.pb.ServerLoad load = 2; */ public boolean hasLoad() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional .hbase.pb.ServerLoad load = 2; - * *
      ** load the server is under 
      * 
+ * + * optional .hbase.pb.ServerLoad load = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad getLoad() { - return load_; + return load_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.getDefaultInstance() : load_; } /** - * optional .hbase.pb.ServerLoad load = 2; - * *
      ** load the server is under 
      * 
+ * + * optional .hbase.pb.ServerLoad load = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoadOrBuilder getLoadOrBuilder() { - return load_; + return load_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.getDefaultInstance() : load_; } - private void initFields() { - server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - load_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasServer()) { memoizedIsInitialized = 0; @@ -2117,43 +2151,35 @@ public final class RegionServerStatusProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, server_); + output.writeMessage(1, getServer()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, load_); + output.writeMessage(2, getLoad()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, server_); + .computeMessageSize(1, getServer()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, load_); + .computeMessageSize(2, getLoad()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += 
unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -2174,12 +2200,10 @@ public final class RegionServerStatusProtos { result = result && getLoad() .equals(other.getLoad()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -2195,7 +2219,7 @@ public final class RegionServerStatusProtos { hash = (37 * hash) + LOAD_FIELD_NUMBER; hash = (53 * hash) + getLoad().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -2223,46 +2247,57 @@ public final class RegionServerStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseDelimitedFrom(java.io.InputStream 
input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -2270,14 +2305,15 @@ public final class RegionServerStatusProtos { * Protobuf type {@code hbase.pb.RegionServerReportRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RegionServerReportRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerReportRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerReportRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -2290,30 +2326,27 @@ public final class RegionServerStatusProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) 
{ getServerFieldBuilder(); getLoadFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (serverBuilder_ == null) { - server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + server_ = null; } else { serverBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (loadBuilder_ == null) { - load_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.getDefaultInstance(); + load_ = null; } else { loadBuilder_.clear(); } @@ -2321,10 +2354,6 @@ public final class RegionServerStatusProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerReportRequest_descriptor; @@ -2367,6 +2396,32 @@ public final class RegionServerStatusProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest)other); @@ -2384,22 +2439,20 @@ public final class RegionServerStatusProtos { if (other.hasLoad()) { mergeLoad(other.getLoad()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasServer()) { - return false; } if (!getServer().isInitialized()) { - return false; } if (hasLoad()) { if (!getLoad().isInitialized()) { - return false; } } @@ -2415,7 +2468,7 @@ public final class RegionServerStatusProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -2425,9 +2478,8 @@ public final class RegionServerStatusProtos { } private int bitField0_; - // required .hbase.pb.ServerName server = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName server_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_; /** * required .hbase.pb.ServerName server = 1; @@ -2440,7 +2492,7 @@ 
public final class RegionServerStatusProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServer() { if (serverBuilder_ == null) { - return server_; + return server_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } else { return serverBuilder_.getMessage(); } @@ -2481,6 +2533,7 @@ public final class RegionServerStatusProtos { public Builder mergeServer(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (serverBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + server_ != null && server_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.newBuilder(server_).mergeFrom(value).buildPartial(); @@ -2499,7 +2552,7 @@ public final class RegionServerStatusProtos { */ public Builder clearServer() { if (serverBuilder_ == null) { - server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + server_ = null; onChanged(); } else { serverBuilder_.clear(); @@ -2522,19 +2575,20 @@ public final class RegionServerStatusProtos { if (serverBuilder_ != null) { return serverBuilder_.getMessageOrBuilder(); } else { - return server_; + return server_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } } /** * required .hbase.pb.ServerName server = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getServerFieldBuilder() { if (serverBuilder_ == null) { - serverBuilder_ = new com.google.protobuf.SingleFieldBuilder< + serverBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( - server_, + getServer(), getParentForChildren(), isClean()); server_ = null; @@ -2542,40 +2596,39 @@ public final class RegionServerStatusProtos { return serverBuilder_; } - // optional .hbase.pb.ServerLoad load = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad load_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad load_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoadOrBuilder> loadBuilder_; /** - * optional .hbase.pb.ServerLoad load = 2; - * *
        ** load the server is under 
        * 
+ * + * optional .hbase.pb.ServerLoad load = 2; */ public boolean hasLoad() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional .hbase.pb.ServerLoad load = 2; - * *
        ** load the server is under 
        * 
+ * + * optional .hbase.pb.ServerLoad load = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad getLoad() { if (loadBuilder_ == null) { - return load_; + return load_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.getDefaultInstance() : load_; } else { return loadBuilder_.getMessage(); } } /** - * optional .hbase.pb.ServerLoad load = 2; - * *
        ** load the server is under 
        * 
+ * + * optional .hbase.pb.ServerLoad load = 2; */ public Builder setLoad(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad value) { if (loadBuilder_ == null) { @@ -2591,11 +2644,11 @@ public final class RegionServerStatusProtos { return this; } /** - * optional .hbase.pb.ServerLoad load = 2; - * *
        ** load the server is under 
        * 
+ * + * optional .hbase.pb.ServerLoad load = 2; */ public Builder setLoad( org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.Builder builderForValue) { @@ -2609,15 +2662,16 @@ public final class RegionServerStatusProtos { return this; } /** - * optional .hbase.pb.ServerLoad load = 2; - * *
        ** load the server is under 
        * 
+ * + * optional .hbase.pb.ServerLoad load = 2; */ public Builder mergeLoad(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad value) { if (loadBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + load_ != null && load_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.getDefaultInstance()) { load_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.newBuilder(load_).mergeFrom(value).buildPartial(); @@ -2632,15 +2686,15 @@ public final class RegionServerStatusProtos { return this; } /** - * optional .hbase.pb.ServerLoad load = 2; - * *
        ** load the server is under 
        * 
+ * + * optional .hbase.pb.ServerLoad load = 2; */ public Builder clearLoad() { if (loadBuilder_ == null) { - load_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.getDefaultInstance(); + load_ = null; onChanged(); } else { loadBuilder_.clear(); @@ -2649,11 +2703,11 @@ public final class RegionServerStatusProtos { return this; } /** - * optional .hbase.pb.ServerLoad load = 2; - * *
        ** load the server is under 
        * 
+ * + * optional .hbase.pb.ServerLoad load = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.Builder getLoadBuilder() { bitField0_ |= 0x00000002; @@ -2661,87 +2715,117 @@ public final class RegionServerStatusProtos { return getLoadFieldBuilder().getBuilder(); } /** - * optional .hbase.pb.ServerLoad load = 2; - * *
        ** load the server is under 
        * 
+ * + * optional .hbase.pb.ServerLoad load = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoadOrBuilder getLoadOrBuilder() { if (loadBuilder_ != null) { return loadBuilder_.getMessageOrBuilder(); } else { - return load_; + return load_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.getDefaultInstance() : load_; } } /** - * optional .hbase.pb.ServerLoad load = 2; - * *
        ** load the server is under 
        * 
+ * + * optional .hbase.pb.ServerLoad load = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoadOrBuilder> getLoadFieldBuilder() { if (loadBuilder_ == null) { - loadBuilder_ = new com.google.protobuf.SingleFieldBuilder< + loadBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoadOrBuilder>( - load_, + getLoad(), getParentForChildren(), isClean()); load_ = null; } return loadBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RegionServerReportRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.RegionServerReportRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest DEFAULT_INSTANCE; static { - defaultInstance = new RegionServerReportRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated 
public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RegionServerReportRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionServerReportRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.RegionServerReportRequest) } - public interface RegionServerReportResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RegionServerReportResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RegionServerReportResponse) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.RegionServerReportResponse} */ - public static final class RegionServerReportResponse extends - com.google.protobuf.GeneratedMessage - implements RegionServerReportResponseOrBuilder { + public static final class RegionServerReportResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RegionServerReportResponse) + RegionServerReportResponseOrBuilder { // Use RegionServerReportResponse.newBuilder() to construct. 
- private RegionServerReportResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private RegionServerReportResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private RegionServerReportResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RegionServerReportResponse defaultInstance; - public static RegionServerReportResponse getDefaultInstance() { - return defaultInstance; - } - - public RegionServerReportResponse getDefaultInstanceForType() { - return defaultInstance; + private RegionServerReportResponse() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RegionServerReportResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -2765,7 +2849,7 @@ public final class RegionServerStatusProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -2776,34 +2860,18 @@ public final class RegionServerStatusProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerReportResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() 
{ return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerReportResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RegionServerReportResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RegionServerReportResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -2811,29 +2879,21 @@ public final class RegionServerStatusProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws 
java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -2844,12 +2904,10 @@ public final class RegionServerStatusProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -2857,7 +2915,7 @@ public final class RegionServerStatusProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -2885,46 +2943,57 @@ public final class RegionServerStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseDelimitedFrom(java.io.InputStream input) throws 
java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -2932,14 +3001,15 @@ public final class RegionServerStatusProtos { * Protobuf type {@code hbase.pb.RegionServerReportResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RegionServerReportResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerReportResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerReportResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -2952,27 +3022,20 @@ public final class RegionServerStatusProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + 
.alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerReportResponse_descriptor; @@ -2996,6 +3059,32 @@ public final class RegionServerStatusProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse)other); @@ -3007,7 +3096,8 @@ public final class RegionServerStatusProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse other) { if (other == 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -3024,7 +3114,7 @@ public final class RegionServerStatusProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -3032,70 +3122,106 @@ public final class RegionServerStatusProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RegionServerReportResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.RegionServerReportResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse DEFAULT_INSTANCE; static { - defaultInstance = new RegionServerReportResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RegionServerReportResponse 
parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionServerReportResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.RegionServerReportResponse) } - public interface ReportRSFatalErrorRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ReportRSFatalErrorRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ReportRSFatalErrorRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.ServerName server = 1; /** - * required .hbase.pb.ServerName server = 1; - * *
      ** name of the server experiencing the error 
      * 
+ * + * required .hbase.pb.ServerName server = 1; */ boolean hasServer(); /** - * required .hbase.pb.ServerName server = 1; - * *
      ** name of the server experiencing the error 
      * 
+ * + * required .hbase.pb.ServerName server = 1; */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServer(); /** - * required .hbase.pb.ServerName server = 1; - * *
      ** name of the server experiencing the error 
      * 
+ * + * required .hbase.pb.ServerName server = 1; */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder(); - // required string error_message = 2; /** - * required string error_message = 2; - * *
      ** informative text to expose in the master logs and UI 
      * 
+ * + * required string error_message = 2; */ boolean hasErrorMessage(); /** - * required string error_message = 2; - * *
      ** informative text to expose in the master logs and UI 
      * 
+ * + * required string error_message = 2; */ java.lang.String getErrorMessage(); /** - * required string error_message = 2; - * *
      ** informative text to expose in the master logs and UI 
      * 
+ * + * required string error_message = 2; */ com.google.protobuf.ByteString getErrorMessageBytes(); @@ -3103,36 +3229,28 @@ public final class RegionServerStatusProtos { /** * Protobuf type {@code hbase.pb.ReportRSFatalErrorRequest} */ - public static final class ReportRSFatalErrorRequest extends - com.google.protobuf.GeneratedMessage - implements ReportRSFatalErrorRequestOrBuilder { + public static final class ReportRSFatalErrorRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ReportRSFatalErrorRequest) + ReportRSFatalErrorRequestOrBuilder { // Use ReportRSFatalErrorRequest.newBuilder() to construct. - private ReportRSFatalErrorRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private ReportRSFatalErrorRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ReportRSFatalErrorRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ReportRSFatalErrorRequest defaultInstance; - public static ReportRSFatalErrorRequest getDefaultInstance() { - return defaultInstance; } - - public ReportRSFatalErrorRequest getDefaultInstanceForType() { - return defaultInstance; + private ReportRSFatalErrorRequest() { + errorMessage_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ReportRSFatalErrorRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -3165,8 +3283,9 @@ public 
final class RegionServerStatusProtos { break; } case 18: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; - errorMessage_ = input.readBytes(); + errorMessage_ = bs; break; } } @@ -3175,93 +3294,76 @@ public final class RegionServerStatusProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRSFatalErrorRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRSFatalErrorRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ReportRSFatalErrorRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ReportRSFatalErrorRequest(input, extensionRegistry); - } - }; + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRSFatalErrorRequest_descriptor; + } - @java.lang.Override 
- public com.google.protobuf.Parser getParserForType() { - return PARSER; + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRSFatalErrorRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest.Builder.class); } private int bitField0_; - // required .hbase.pb.ServerName server = 1; public static final int SERVER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName server_; /** - * required .hbase.pb.ServerName server = 1; - * *
      ** name of the server experiencing the error 
      * 
+ * + * required .hbase.pb.ServerName server = 1; */ public boolean hasServer() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required .hbase.pb.ServerName server = 1; - * *
      ** name of the server experiencing the error 
      * 
+ * + * required .hbase.pb.ServerName server = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServer() { - return server_; + return server_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } /** - * required .hbase.pb.ServerName server = 1; - * *
      ** name of the server experiencing the error 
      * 
+ * + * required .hbase.pb.ServerName server = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { - return server_; + return server_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } - // required string error_message = 2; public static final int ERROR_MESSAGE_FIELD_NUMBER = 2; - private java.lang.Object errorMessage_; + private volatile java.lang.Object errorMessage_; /** - * required string error_message = 2; - * *
      ** informative text to expose in the master logs and UI 
      * 
+ * + * required string error_message = 2; */ public boolean hasErrorMessage() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * required string error_message = 2; - * *
      ** informative text to expose in the master logs and UI 
      * 
+ * + * required string error_message = 2; */ public java.lang.String getErrorMessage() { java.lang.Object ref = errorMessage_; @@ -3278,11 +3380,11 @@ public final class RegionServerStatusProtos { } } /** - * required string error_message = 2; - * *
      ** informative text to expose in the master logs and UI 
      * 
+ * + * required string error_message = 2; */ public com.google.protobuf.ByteString getErrorMessageBytes() { @@ -3298,14 +3400,11 @@ public final class RegionServerStatusProtos { } } - private void initFields() { - server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - errorMessage_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasServer()) { memoizedIsInitialized = 0; @@ -3325,43 +3424,34 @@ public final class RegionServerStatusProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, server_); + output.writeMessage(1, getServer()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getErrorMessageBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, errorMessage_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, server_); + .computeMessageSize(1, getServer()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getErrorMessageBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, errorMessage_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long 
serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -3382,12 +3472,10 @@ public final class RegionServerStatusProtos { result = result && getErrorMessage() .equals(other.getErrorMessage()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -3403,7 +3491,7 @@ public final class RegionServerStatusProtos { hash = (37 * hash) + ERROR_MESSAGE_FIELD_NUMBER; hash = (53 * hash) + getErrorMessage().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -3431,46 +3519,57 @@ public final class RegionServerStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return 
PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -3478,14 +3577,15 @@ public final class RegionServerStatusProtos { * Protobuf type {@code hbase.pb.ReportRSFatalErrorRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ReportRSFatalErrorRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRSFatalErrorRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRSFatalErrorRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -3498,23 +3598,20 @@ public final class RegionServerStatusProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) 
{ getServerFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (serverBuilder_ == null) { - server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + server_ = null; } else { serverBuilder_.clear(); } @@ -3524,10 +3621,6 @@ public final class RegionServerStatusProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRSFatalErrorRequest_descriptor; @@ -3566,6 +3659,32 @@ public final class RegionServerStatusProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest)other); @@ -3585,21 +3704,19 @@ public final 
class RegionServerStatusProtos { errorMessage_ = other.errorMessage_; onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasServer()) { - return false; } if (!hasErrorMessage()) { - return false; } if (!getServer().isInitialized()) { - return false; } return true; @@ -3614,7 +3731,7 @@ public final class RegionServerStatusProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -3624,40 +3741,39 @@ public final class RegionServerStatusProtos { } private int bitField0_; - // required .hbase.pb.ServerName server = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName server_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_; /** - * required .hbase.pb.ServerName server = 1; - * *
        ** name of the server experiencing the error 
        * 
+ * + * required .hbase.pb.ServerName server = 1; */ public boolean hasServer() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required .hbase.pb.ServerName server = 1; - * *
        ** name of the server experiencing the error 
        * 
+ * + * required .hbase.pb.ServerName server = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServer() { if (serverBuilder_ == null) { - return server_; + return server_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } else { return serverBuilder_.getMessage(); } } /** - * required .hbase.pb.ServerName server = 1; - * *
        ** name of the server experiencing the error 
        * 
+ * + * required .hbase.pb.ServerName server = 1; */ public Builder setServer(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (serverBuilder_ == null) { @@ -3673,11 +3789,11 @@ public final class RegionServerStatusProtos { return this; } /** - * required .hbase.pb.ServerName server = 1; - * *
        ** name of the server experiencing the error 
        * 
+ * + * required .hbase.pb.ServerName server = 1; */ public Builder setServer( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { @@ -3691,15 +3807,16 @@ public final class RegionServerStatusProtos { return this; } /** - * required .hbase.pb.ServerName server = 1; - * *
        ** name of the server experiencing the error 
        * 
+ * + * required .hbase.pb.ServerName server = 1; */ public Builder mergeServer(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (serverBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + server_ != null && server_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.newBuilder(server_).mergeFrom(value).buildPartial(); @@ -3714,15 +3831,15 @@ public final class RegionServerStatusProtos { return this; } /** - * required .hbase.pb.ServerName server = 1; - * *
        ** name of the server experiencing the error 
        * 
+ * + * required .hbase.pb.ServerName server = 1; */ public Builder clearServer() { if (serverBuilder_ == null) { - server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + server_ = null; onChanged(); } else { serverBuilder_.clear(); @@ -3731,11 +3848,11 @@ public final class RegionServerStatusProtos { return this; } /** - * required .hbase.pb.ServerName server = 1; - * *
        ** name of the server experiencing the error 
        * 
+ * + * required .hbase.pb.ServerName server = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder getServerBuilder() { bitField0_ |= 0x00000001; @@ -3743,33 +3860,34 @@ public final class RegionServerStatusProtos { return getServerFieldBuilder().getBuilder(); } /** - * required .hbase.pb.ServerName server = 1; - * *
        ** name of the server experiencing the error 
        * 
+ * + * required .hbase.pb.ServerName server = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { if (serverBuilder_ != null) { return serverBuilder_.getMessageOrBuilder(); } else { - return server_; + return server_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } } /** - * required .hbase.pb.ServerName server = 1; - * *
        ** name of the server experiencing the error 
        * 
+ * + * required .hbase.pb.ServerName server = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getServerFieldBuilder() { if (serverBuilder_ == null) { - serverBuilder_ = new com.google.protobuf.SingleFieldBuilder< + serverBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( - server_, + getServer(), getParentForChildren(), isClean()); server_ = null; @@ -3777,42 +3895,44 @@ public final class RegionServerStatusProtos { return serverBuilder_; } - // required string error_message = 2; private java.lang.Object errorMessage_ = ""; /** - * required string error_message = 2; - * *
        ** informative text to expose in the master logs and UI 
        * 
+ * + * required string error_message = 2; */ public boolean hasErrorMessage() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * required string error_message = 2; - * *
        ** informative text to expose in the master logs and UI 
        * 
+ * + * required string error_message = 2; */ public java.lang.String getErrorMessage() { java.lang.Object ref = errorMessage_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - errorMessage_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + errorMessage_ = s; + } return s; } else { return (java.lang.String) ref; } } /** - * required string error_message = 2; - * *
        ** informative text to expose in the master logs and UI 
        * 
+ * + * required string error_message = 2; */ public com.google.protobuf.ByteString getErrorMessageBytes() { @@ -3828,11 +3948,11 @@ public final class RegionServerStatusProtos { } } /** - * required string error_message = 2; - * *
        ** informative text to expose in the master logs and UI 
        * 
+ * + * required string error_message = 2; */ public Builder setErrorMessage( java.lang.String value) { @@ -3845,11 +3965,11 @@ public final class RegionServerStatusProtos { return this; } /** - * required string error_message = 2; - * *
        ** informative text to expose in the master logs and UI 
        * 
+ * + * required string error_message = 2; */ public Builder clearErrorMessage() { bitField0_ = (bitField0_ & ~0x00000002); @@ -3858,11 +3978,11 @@ public final class RegionServerStatusProtos { return this; } /** - * required string error_message = 2; - * *
        ** informative text to expose in the master logs and UI 
        * 
+ * + * required string error_message = 2; */ public Builder setErrorMessageBytes( com.google.protobuf.ByteString value) { @@ -3874,54 +3994,83 @@ public final class RegionServerStatusProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ReportRSFatalErrorRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.ReportRSFatalErrorRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest DEFAULT_INSTANCE; static { - defaultInstance = new ReportRSFatalErrorRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ReportRSFatalErrorRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ReportRSFatalErrorRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest getDefaultInstanceForType() { + return 
DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ReportRSFatalErrorRequest) } - public interface ReportRSFatalErrorResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ReportRSFatalErrorResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ReportRSFatalErrorResponse) + com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.ReportRSFatalErrorResponse} */ - public static final class ReportRSFatalErrorResponse extends - com.google.protobuf.GeneratedMessage - implements ReportRSFatalErrorResponseOrBuilder { + public static final class ReportRSFatalErrorResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ReportRSFatalErrorResponse) + ReportRSFatalErrorResponseOrBuilder { // Use ReportRSFatalErrorResponse.newBuilder() to construct. - private ReportRSFatalErrorResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private ReportRSFatalErrorResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ReportRSFatalErrorResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ReportRSFatalErrorResponse defaultInstance; - public static ReportRSFatalErrorResponse getDefaultInstance() { - return defaultInstance; } - - public ReportRSFatalErrorResponse getDefaultInstanceForType() { - return defaultInstance; + private ReportRSFatalErrorResponse() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ReportRSFatalErrorResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -3945,7 +4094,7 @@ public final class RegionServerStatusProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -3956,34 +4105,18 @@ public final class RegionServerStatusProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRSFatalErrorResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRSFatalErrorResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ReportRSFatalErrorResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ReportRSFatalErrorResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = 
memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -3991,29 +4124,21 @@ public final class RegionServerStatusProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -4024,12 +4149,10 @@ public final class RegionServerStatusProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -4037,7 +4160,7 @@ public final class RegionServerStatusProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -4065,46 +4188,57 @@ public final 
class RegionServerStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -4112,14 +4246,15 @@ public final class RegionServerStatusProtos { * Protobuf type {@code hbase.pb.ReportRSFatalErrorResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ReportRSFatalErrorResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRSFatalErrorResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRSFatalErrorResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -4132,27 +4267,20 @@ public final class RegionServerStatusProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRSFatalErrorResponse_descriptor; @@ -4176,6 +4304,32 @@ public final class RegionServerStatusProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + 
com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse)other); @@ -4187,7 +4341,8 @@ public final class RegionServerStatusProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -4204,7 +4359,7 @@ public final class RegionServerStatusProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -4212,72 +4367,101 @@ public final class RegionServerStatusProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // 
@@protoc_insertion_point(builder_scope:hbase.pb.ReportRSFatalErrorResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.ReportRSFatalErrorResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse DEFAULT_INSTANCE; static { - defaultInstance = new ReportRSFatalErrorResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ReportRSFatalErrorResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ReportRSFatalErrorResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ReportRSFatalErrorResponse) } - public interface GetLastFlushedSequenceIdRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface GetLastFlushedSequenceIdRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GetLastFlushedSequenceIdRequest) + com.google.protobuf.MessageOrBuilder { - // required bytes region_name = 1; /** - * required bytes region_name = 1; - * *
      ** region name 
      * 
+ * + * required bytes region_name = 1; */ boolean hasRegionName(); /** - * required bytes region_name = 1; - * *
      ** region name 
      * 
+ * + * required bytes region_name = 1; */ com.google.protobuf.ByteString getRegionName(); } /** * Protobuf type {@code hbase.pb.GetLastFlushedSequenceIdRequest} */ - public static final class GetLastFlushedSequenceIdRequest extends - com.google.protobuf.GeneratedMessage - implements GetLastFlushedSequenceIdRequestOrBuilder { + public static final class GetLastFlushedSequenceIdRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetLastFlushedSequenceIdRequest) + GetLastFlushedSequenceIdRequestOrBuilder { // Use GetLastFlushedSequenceIdRequest.newBuilder() to construct. - private GetLastFlushedSequenceIdRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetLastFlushedSequenceIdRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private GetLastFlushedSequenceIdRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final GetLastFlushedSequenceIdRequest defaultInstance; - public static GetLastFlushedSequenceIdRequest getDefaultInstance() { - return defaultInstance; - } - - public GetLastFlushedSequenceIdRequest getDefaultInstanceForType() { - return defaultInstance; + private GetLastFlushedSequenceIdRequest() { + regionName_ = com.google.protobuf.ByteString.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private GetLastFlushedSequenceIdRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = 
com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -4307,7 +4491,7 @@ public final class RegionServerStatusProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -4318,60 +4502,42 @@ public final class RegionServerStatusProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_GetLastFlushedSequenceIdRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_GetLastFlushedSequenceIdRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetLastFlushedSequenceIdRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetLastFlushedSequenceIdRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bytes region_name = 1; public static final int REGION_NAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString regionName_; /** - * required bytes region_name = 1; - * *
      ** region name 
      * 
+ * + * required bytes region_name = 1; */ public boolean hasRegionName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required bytes region_name = 1; - * *
      ** region name 
      * 
+ * + * required bytes region_name = 1; */ public com.google.protobuf.ByteString getRegionName() { return regionName_; } - private void initFields() { - regionName_ = com.google.protobuf.ByteString.EMPTY; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRegionName()) { memoizedIsInitialized = 0; @@ -4383,16 +4549,14 @@ public final class RegionServerStatusProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, regionName_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -4400,19 +4564,13 @@ public final class RegionServerStatusProtos { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, regionName_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -4428,12 +4586,10 @@ public final class RegionServerStatusProtos { result = result && getRegionName() .equals(other.getRegionName()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; 
@java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -4445,7 +4601,7 @@ public final class RegionServerStatusProtos { hash = (37 * hash) + REGION_NAME_FIELD_NUMBER; hash = (53 * hash) + getRegionName().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -4473,46 +4629,57 @@ public final class RegionServerStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -4520,14 +4687,15 @@ public final class RegionServerStatusProtos { * Protobuf type {@code hbase.pb.GetLastFlushedSequenceIdRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetLastFlushedSequenceIdRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_GetLastFlushedSequenceIdRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_GetLastFlushedSequenceIdRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -4540,18 +4708,15 @@ public final class RegionServerStatusProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if 
(com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); regionName_ = com.google.protobuf.ByteString.EMPTY; @@ -4559,10 +4724,6 @@ public final class RegionServerStatusProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_GetLastFlushedSequenceIdRequest_descriptor; @@ -4592,7 +4753,33 @@ public final class RegionServerStatusProtos { onBuilt(); return result; } - + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest)other); @@ -4607,13 +4794,13 @@ public final class RegionServerStatusProtos { if (other.hasRegionName()) { 
setRegionName(other.getRegionName()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasRegionName()) { - return false; } return true; @@ -4628,7 +4815,7 @@ public final class RegionServerStatusProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -4638,34 +4825,33 @@ public final class RegionServerStatusProtos { } private int bitField0_; - // required bytes region_name = 1; private com.google.protobuf.ByteString regionName_ = com.google.protobuf.ByteString.EMPTY; /** - * required bytes region_name = 1; - * *
        ** region name 
        * 
+ * + * required bytes region_name = 1; */ public boolean hasRegionName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required bytes region_name = 1; - * *
        ** region name 
        * 
+ * + * required bytes region_name = 1; */ public com.google.protobuf.ByteString getRegionName() { return regionName_; } /** - * required bytes region_name = 1; - * *
        ** region name 
        * 
+ * + * required bytes region_name = 1; */ public Builder setRegionName(com.google.protobuf.ByteString value) { if (value == null) { @@ -4677,11 +4863,11 @@ public final class RegionServerStatusProtos { return this; } /** - * required bytes region_name = 1; - * *
        ** region name 
        * 
+ * + * required bytes region_name = 1; */ public Builder clearRegionName() { bitField0_ = (bitField0_ & ~0x00000001); @@ -4689,80 +4875,116 @@ public final class RegionServerStatusProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.GetLastFlushedSequenceIdRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.GetLastFlushedSequenceIdRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest DEFAULT_INSTANCE; static { - defaultInstance = new GetLastFlushedSequenceIdRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GetLastFlushedSequenceIdRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetLastFlushedSequenceIdRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.GetLastFlushedSequenceIdRequest) } - public interface GetLastFlushedSequenceIdResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface GetLastFlushedSequenceIdResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GetLastFlushedSequenceIdResponse) + com.google.protobuf.MessageOrBuilder { - // required uint64 last_flushed_sequence_id = 1; /** - * required uint64 last_flushed_sequence_id = 1; - * *
      ** the last WAL sequence id flushed from MemStore to HFile for the region 
      * 
+ * + * required uint64 last_flushed_sequence_id = 1; */ boolean hasLastFlushedSequenceId(); /** - * required uint64 last_flushed_sequence_id = 1; - * *
      ** the last WAL sequence id flushed from MemStore to HFile for the region 
      * 
+ * + * required uint64 last_flushed_sequence_id = 1; */ long getLastFlushedSequenceId(); - // repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; /** - * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; - * *
      ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
      * 
+ * + * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; */ java.util.List getStoreLastFlushedSequenceIdList(); /** - * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; - * *
      ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
      * 
+ * + * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId getStoreLastFlushedSequenceId(int index); /** - * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; - * *
      ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
      * 
+ * + * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; */ int getStoreLastFlushedSequenceIdCount(); /** - * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; - * *
      ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
      * 
+ * + * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; */ java.util.List getStoreLastFlushedSequenceIdOrBuilderList(); /** - * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; - * *
      ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
      * 
+ * + * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceIdOrBuilder getStoreLastFlushedSequenceIdOrBuilder( int index); @@ -4770,36 +4992,29 @@ public final class RegionServerStatusProtos { /** * Protobuf type {@code hbase.pb.GetLastFlushedSequenceIdResponse} */ - public static final class GetLastFlushedSequenceIdResponse extends - com.google.protobuf.GeneratedMessage - implements GetLastFlushedSequenceIdResponseOrBuilder { + public static final class GetLastFlushedSequenceIdResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetLastFlushedSequenceIdResponse) + GetLastFlushedSequenceIdResponseOrBuilder { // Use GetLastFlushedSequenceIdResponse.newBuilder() to construct. - private GetLastFlushedSequenceIdResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private GetLastFlushedSequenceIdResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private GetLastFlushedSequenceIdResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final GetLastFlushedSequenceIdResponse defaultInstance; - public static GetLastFlushedSequenceIdResponse getDefaultInstance() { - return defaultInstance; } - - public GetLastFlushedSequenceIdResponse getDefaultInstanceForType() { - return defaultInstance; + private GetLastFlushedSequenceIdResponse() { + lastFlushedSequenceId_ = 0L; + storeLastFlushedSequenceId_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private GetLastFlushedSequenceIdResponse( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -4828,7 +5043,8 @@ public final class RegionServerStatusProtos { storeLastFlushedSequenceId_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } - storeLastFlushedSequenceId_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.PARSER, extensionRegistry)); + storeLastFlushedSequenceId_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.PARSER, extensionRegistry)); break; } } @@ -4837,7 +5053,7 @@ public final class RegionServerStatusProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { storeLastFlushedSequenceId_ = java.util.Collections.unmodifiableList(storeLastFlushedSequenceId_); @@ -4851,117 +5067,97 @@ public final class RegionServerStatusProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_GetLastFlushedSequenceIdResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_GetLastFlushedSequenceIdResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.class, 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public GetLastFlushedSequenceIdResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetLastFlushedSequenceIdResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required uint64 last_flushed_sequence_id = 1; public static final int LAST_FLUSHED_SEQUENCE_ID_FIELD_NUMBER = 1; private long lastFlushedSequenceId_; /** - * required uint64 last_flushed_sequence_id = 1; - * *
      ** the last WAL sequence id flushed from MemStore to HFile for the region 
      * 
+ * + * required uint64 last_flushed_sequence_id = 1; */ public boolean hasLastFlushedSequenceId() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required uint64 last_flushed_sequence_id = 1; - * *
      ** the last WAL sequence id flushed from MemStore to HFile for the region 
      * 
+ * + * required uint64 last_flushed_sequence_id = 1; */ public long getLastFlushedSequenceId() { return lastFlushedSequenceId_; } - // repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; public static final int STORE_LAST_FLUSHED_SEQUENCE_ID_FIELD_NUMBER = 2; private java.util.List storeLastFlushedSequenceId_; /** - * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; - * *
      ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
      * 
+ * + * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; */ public java.util.List getStoreLastFlushedSequenceIdList() { return storeLastFlushedSequenceId_; } /** - * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; - * *
      ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
      * 
+ * + * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; */ public java.util.List getStoreLastFlushedSequenceIdOrBuilderList() { return storeLastFlushedSequenceId_; } /** - * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; - * *
      ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
      * 
+ * + * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; */ public int getStoreLastFlushedSequenceIdCount() { return storeLastFlushedSequenceId_.size(); } /** - * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; - * *
      ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
      * 
+ * + * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId getStoreLastFlushedSequenceId(int index) { return storeLastFlushedSequenceId_.get(index); } /** - * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; - * *
      ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
      * 
+ * + * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceIdOrBuilder getStoreLastFlushedSequenceIdOrBuilder( int index) { return storeLastFlushedSequenceId_.get(index); } - private void initFields() { - lastFlushedSequenceId_ = 0L; - storeLastFlushedSequenceId_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasLastFlushedSequenceId()) { memoizedIsInitialized = 0; @@ -4979,19 +5175,17 @@ public final class RegionServerStatusProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, lastFlushedSequenceId_); } for (int i = 0; i < storeLastFlushedSequenceId_.size(); i++) { output.writeMessage(2, storeLastFlushedSequenceId_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -5003,19 +5197,13 @@ public final class RegionServerStatusProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, storeLastFlushedSequenceId_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final 
java.lang.Object obj) { if (obj == this) { return true; @@ -5033,12 +5221,10 @@ public final class RegionServerStatusProtos { } result = result && getStoreLastFlushedSequenceIdList() .equals(other.getStoreLastFlushedSequenceIdList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -5048,13 +5234,14 @@ public final class RegionServerStatusProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasLastFlushedSequenceId()) { hash = (37 * hash) + LAST_FLUSHED_SEQUENCE_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLastFlushedSequenceId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getLastFlushedSequenceId()); } if (getStoreLastFlushedSequenceIdCount() > 0) { hash = (37 * hash) + STORE_LAST_FLUSHED_SEQUENCE_ID_FIELD_NUMBER; hash = (53 * hash) + getStoreLastFlushedSequenceIdList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -5082,46 +5269,57 @@ public final class RegionServerStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); 
} public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse prototype) { - return newBuilder().mergeFrom(prototype); + return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -5129,14 +5327,15 @@ public final class RegionServerStatusProtos { * Protobuf type {@code hbase.pb.GetLastFlushedSequenceIdResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetLastFlushedSequenceIdResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_GetLastFlushedSequenceIdResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_GetLastFlushedSequenceIdResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -5149,19 +5348,16 @@ public final class RegionServerStatusProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void 
maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getStoreLastFlushedSequenceIdFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); lastFlushedSequenceId_ = 0L; @@ -5175,10 +5371,6 @@ public final class RegionServerStatusProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_GetLastFlushedSequenceIdResponse_descriptor; @@ -5218,6 +5410,32 @@ public final class RegionServerStatusProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse) { return 
mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse)other); @@ -5251,25 +5469,24 @@ public final class RegionServerStatusProtos { storeLastFlushedSequenceId_ = other.storeLastFlushedSequenceId_; bitField0_ = (bitField0_ & ~0x00000002); storeLastFlushedSequenceIdBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getStoreLastFlushedSequenceIdFieldBuilder() : null; } else { storeLastFlushedSequenceIdBuilder_.addAllMessages(other.storeLastFlushedSequenceId_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasLastFlushedSequenceId()) { - return false; } for (int i = 0; i < getStoreLastFlushedSequenceIdCount(); i++) { if (!getStoreLastFlushedSequenceId(i).isInitialized()) { - return false; } } @@ -5285,7 +5502,7 @@ public final class RegionServerStatusProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -5295,34 +5512,33 @@ public final class RegionServerStatusProtos { } private int bitField0_; - // required uint64 last_flushed_sequence_id = 1; private long lastFlushedSequenceId_ ; /** - * required uint64 last_flushed_sequence_id = 1; - * *
        ** the last WAL sequence id flushed from MemStore to HFile for the region 
        * 
+ * + * required uint64 last_flushed_sequence_id = 1; */ public boolean hasLastFlushedSequenceId() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required uint64 last_flushed_sequence_id = 1; - * *
        ** the last WAL sequence id flushed from MemStore to HFile for the region 
        * 
+ * + * required uint64 last_flushed_sequence_id = 1; */ public long getLastFlushedSequenceId() { return lastFlushedSequenceId_; } /** - * required uint64 last_flushed_sequence_id = 1; - * *
        ** the last WAL sequence id flushed from MemStore to HFile for the region 
        * 
+ * + * required uint64 last_flushed_sequence_id = 1; */ public Builder setLastFlushedSequenceId(long value) { bitField0_ |= 0x00000001; @@ -5331,11 +5547,11 @@ public final class RegionServerStatusProtos { return this; } /** - * required uint64 last_flushed_sequence_id = 1; - * *
        ** the last WAL sequence id flushed from MemStore to HFile for the region 
        * 
+ * + * required uint64 last_flushed_sequence_id = 1; */ public Builder clearLastFlushedSequenceId() { bitField0_ = (bitField0_ & ~0x00000001); @@ -5344,7 +5560,6 @@ public final class RegionServerStatusProtos { return this; } - // repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; private java.util.List storeLastFlushedSequenceId_ = java.util.Collections.emptyList(); private void ensureStoreLastFlushedSequenceIdIsMutable() { @@ -5354,15 +5569,15 @@ public final class RegionServerStatusProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceIdOrBuilder> storeLastFlushedSequenceIdBuilder_; /** - * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; - * *
        ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; */ public java.util.List getStoreLastFlushedSequenceIdList() { if (storeLastFlushedSequenceIdBuilder_ == null) { @@ -5372,11 +5587,11 @@ public final class RegionServerStatusProtos { } } /** - * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; - * *
        ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; */ public int getStoreLastFlushedSequenceIdCount() { if (storeLastFlushedSequenceIdBuilder_ == null) { @@ -5386,11 +5601,11 @@ public final class RegionServerStatusProtos { } } /** - * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; - * *
        ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId getStoreLastFlushedSequenceId(int index) { if (storeLastFlushedSequenceIdBuilder_ == null) { @@ -5400,11 +5615,11 @@ public final class RegionServerStatusProtos { } } /** - * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; - * *
        ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; */ public Builder setStoreLastFlushedSequenceId( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId value) { @@ -5421,11 +5636,11 @@ public final class RegionServerStatusProtos { return this; } /** - * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; - * *
        ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; */ public Builder setStoreLastFlushedSequenceId( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder builderForValue) { @@ -5439,11 +5654,11 @@ public final class RegionServerStatusProtos { return this; } /** - * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; - * *
        ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; */ public Builder addStoreLastFlushedSequenceId(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId value) { if (storeLastFlushedSequenceIdBuilder_ == null) { @@ -5459,11 +5674,11 @@ public final class RegionServerStatusProtos { return this; } /** - * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; - * *
        ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; */ public Builder addStoreLastFlushedSequenceId( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId value) { @@ -5480,11 +5695,11 @@ public final class RegionServerStatusProtos { return this; } /** - * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; - * *
        ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; */ public Builder addStoreLastFlushedSequenceId( org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder builderForValue) { @@ -5498,11 +5713,11 @@ public final class RegionServerStatusProtos { return this; } /** - * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; - * *
        ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; */ public Builder addStoreLastFlushedSequenceId( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder builderForValue) { @@ -5516,17 +5731,18 @@ public final class RegionServerStatusProtos { return this; } /** - * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; - * *
        ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; */ public Builder addAllStoreLastFlushedSequenceId( java.lang.Iterable values) { if (storeLastFlushedSequenceIdBuilder_ == null) { ensureStoreLastFlushedSequenceIdIsMutable(); - super.addAll(values, storeLastFlushedSequenceId_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, storeLastFlushedSequenceId_); onChanged(); } else { storeLastFlushedSequenceIdBuilder_.addAllMessages(values); @@ -5534,11 +5750,11 @@ public final class RegionServerStatusProtos { return this; } /** - * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; - * *
        ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; */ public Builder clearStoreLastFlushedSequenceId() { if (storeLastFlushedSequenceIdBuilder_ == null) { @@ -5551,11 +5767,11 @@ public final class RegionServerStatusProtos { return this; } /** - * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; - * *
        ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; */ public Builder removeStoreLastFlushedSequenceId(int index) { if (storeLastFlushedSequenceIdBuilder_ == null) { @@ -5568,22 +5784,22 @@ public final class RegionServerStatusProtos { return this; } /** - * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; - * *
        ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder getStoreLastFlushedSequenceIdBuilder( int index) { return getStoreLastFlushedSequenceIdFieldBuilder().getBuilder(index); } /** - * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; - * *
        ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceIdOrBuilder getStoreLastFlushedSequenceIdOrBuilder( int index) { @@ -5593,11 +5809,11 @@ public final class RegionServerStatusProtos { } } /** - * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; - * *
        ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; */ public java.util.List getStoreLastFlushedSequenceIdOrBuilderList() { @@ -5608,22 +5824,22 @@ public final class RegionServerStatusProtos { } } /** - * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; - * *
        ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder addStoreLastFlushedSequenceIdBuilder() { return getStoreLastFlushedSequenceIdFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.getDefaultInstance()); } /** - * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; - * *
        ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder addStoreLastFlushedSequenceIdBuilder( int index) { @@ -5631,21 +5847,21 @@ public final class RegionServerStatusProtos { index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.getDefaultInstance()); } /** - * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; - * *
        ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
        * 
+ * + * repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2; */ public java.util.List getStoreLastFlushedSequenceIdBuilderList() { return getStoreLastFlushedSequenceIdFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceIdOrBuilder> getStoreLastFlushedSequenceIdFieldBuilder() { if (storeLastFlushedSequenceIdBuilder_ == null) { - storeLastFlushedSequenceIdBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + storeLastFlushedSequenceIdBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceIdOrBuilder>( storeLastFlushedSequenceId_, ((bitField0_ & 0x00000002) == 0x00000002), @@ -5655,22 +5871,59 @@ public final class RegionServerStatusProtos { } return storeLastFlushedSequenceIdBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.GetLastFlushedSequenceIdResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.GetLastFlushedSequenceIdResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse DEFAULT_INSTANCE; static { - 
defaultInstance = new GetLastFlushedSequenceIdResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GetLastFlushedSequenceIdResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetLastFlushedSequenceIdResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.GetLastFlushedSequenceIdResponse) } - public interface RegionStateTransitionOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RegionStateTransitionOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RegionStateTransition) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionStateTransition.TransitionCode transition_code = 1; /** * required .hbase.pb.RegionStateTransition.TransitionCode transition_code = 1; */ @@ -5680,102 +5933,94 @@ public final class RegionServerStatusProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode getTransitionCode(); - // repeated .hbase.pb.RegionInfo region_info = 
2; /** - * repeated .hbase.pb.RegionInfo region_info = 2; - * *
      ** Mutliple regions are involved during merging/splitting 
      * 
+ * + * repeated .hbase.pb.RegionInfo region_info = 2; */ java.util.List getRegionInfoList(); /** - * repeated .hbase.pb.RegionInfo region_info = 2; - * *
      ** Mutliple regions are involved during merging/splitting 
      * 
+ * + * repeated .hbase.pb.RegionInfo region_info = 2; */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index); /** - * repeated .hbase.pb.RegionInfo region_info = 2; - * *
      ** Mutliple regions are involved during merging/splitting 
      * 
+ * + * repeated .hbase.pb.RegionInfo region_info = 2; */ int getRegionInfoCount(); /** - * repeated .hbase.pb.RegionInfo region_info = 2; - * *
      ** Mutliple regions are involved during merging/splitting 
      * 
+ * + * repeated .hbase.pb.RegionInfo region_info = 2; */ java.util.List getRegionInfoOrBuilderList(); /** - * repeated .hbase.pb.RegionInfo region_info = 2; - * *
      ** Mutliple regions are involved during merging/splitting 
      * 
+ * + * repeated .hbase.pb.RegionInfo region_info = 2; */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder( int index); - // optional uint64 open_seq_num = 3; /** - * optional uint64 open_seq_num = 3; - * *
      ** For newly opened region, the open seq num is needed 
      * 
+ * + * optional uint64 open_seq_num = 3; */ boolean hasOpenSeqNum(); /** - * optional uint64 open_seq_num = 3; - * *
      ** For newly opened region, the open seq num is needed 
      * 
+ * + * optional uint64 open_seq_num = 3; */ long getOpenSeqNum(); } /** * Protobuf type {@code hbase.pb.RegionStateTransition} */ - public static final class RegionStateTransition extends - com.google.protobuf.GeneratedMessage - implements RegionStateTransitionOrBuilder { + public static final class RegionStateTransition extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RegionStateTransition) + RegionStateTransitionOrBuilder { // Use RegionStateTransition.newBuilder() to construct. - private RegionStateTransition(com.google.protobuf.GeneratedMessage.Builder builder) { + private RegionStateTransition(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private RegionStateTransition(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RegionStateTransition defaultInstance; - public static RegionStateTransition getDefaultInstance() { - return defaultInstance; } - - public RegionStateTransition getDefaultInstanceForType() { - return defaultInstance; + private RegionStateTransition() { + transitionCode_ = 0; + regionInfo_ = java.util.Collections.emptyList(); + openSeqNum_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RegionStateTransition( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -5801,7 +6046,7 @@ public final class RegionServerStatusProtos { unknownFields.mergeVarintField(1, rawValue); } 
else { bitField0_ |= 0x00000001; - transitionCode_ = value; + transitionCode_ = rawValue; } break; } @@ -5810,7 +6055,8 @@ public final class RegionServerStatusProtos { regionInfo_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } - regionInfo_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry)); + regionInfo_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry)); break; } case 24: { @@ -5824,7 +6070,7 @@ public final class RegionServerStatusProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_); @@ -5838,28 +6084,13 @@ public final class RegionServerStatusProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionStateTransition_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionStateTransition_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RegionStateTransition parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RegionStateTransition(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - /** * Protobuf enum {@code hbase.pb.RegionStateTransition.TransitionCode} */ @@ -5868,55 +6099,55 @@ public final class RegionServerStatusProtos { /** * OPENED = 0; */ - OPENED(0, 0), + OPENED(0), /** * FAILED_OPEN = 1; */ - FAILED_OPEN(1, 1), + FAILED_OPEN(1), /** - * CLOSED = 2; - * *
        ** No failed_close, in which case region server will abort 
        * 
+ * + * CLOSED = 2; */ - CLOSED(2, 2), + CLOSED(2), /** - * READY_TO_SPLIT = 3; - * *
        ** Ask master for ok to split/merge region(s) 
        * 
+ * + * READY_TO_SPLIT = 3; */ - READY_TO_SPLIT(3, 3), + READY_TO_SPLIT(3), /** * READY_TO_MERGE = 4; */ - READY_TO_MERGE(4, 4), + READY_TO_MERGE(4), /** * SPLIT_PONR = 5; */ - SPLIT_PONR(5, 5), + SPLIT_PONR(5), /** * MERGE_PONR = 6; */ - MERGE_PONR(6, 6), + MERGE_PONR(6), /** * SPLIT = 7; */ - SPLIT(7, 7), + SPLIT(7), /** * MERGED = 8; */ - MERGED(8, 8), + MERGED(8), /** * SPLIT_REVERTED = 9; */ - SPLIT_REVERTED(9, 9), + SPLIT_REVERTED(9), /** * MERGE_REVERTED = 10; */ - MERGE_REVERTED(10, 10), + MERGE_REVERTED(10), ; /** @@ -5928,19 +6159,19 @@ public final class RegionServerStatusProtos { */ public static final int FAILED_OPEN_VALUE = 1; /** - * CLOSED = 2; - * *
        ** No failed_close, in which case region server will abort 
        * 
+ * + * CLOSED = 2; */ public static final int CLOSED_VALUE = 2; /** - * READY_TO_SPLIT = 3; - * *
        ** Ask master for ok to split/merge region(s) 
        * 
+ * + * READY_TO_SPLIT = 3; */ public static final int READY_TO_SPLIT_VALUE = 3; /** @@ -5973,9 +6204,19 @@ public final class RegionServerStatusProtos { public static final int MERGE_REVERTED_VALUE = 10; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static TransitionCode valueOf(int value) { + return forNumber(value); + } + + public static TransitionCode forNumber(int value) { switch (value) { case 0: return OPENED; case 1: return FAILED_OPEN; @@ -5996,17 +6237,17 @@ public final class RegionServerStatusProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + TransitionCode> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public TransitionCode findValueByNumber(int number) { - return TransitionCode.valueOf(number); + return TransitionCode.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -6028,11 +6269,9 @@ public final class RegionServerStatusProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private TransitionCode(int index, int value) { - this.index = index; + private TransitionCode(int value) { this.value = value; } @@ -6040,9 +6279,8 @@ public final class RegionServerStatusProtos { } private int bitField0_; - // required .hbase.pb.RegionStateTransition.TransitionCode transition_code = 1; public static final int TRANSITION_CODE_FIELD_NUMBER = 1; - private 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode transitionCode_; + private int transitionCode_; /** * required .hbase.pb.RegionStateTransition.TransitionCode transition_code = 1; */ @@ -6053,98 +6291,93 @@ public final class RegionServerStatusProtos { * required .hbase.pb.RegionStateTransition.TransitionCode transition_code = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode getTransitionCode() { - return transitionCode_; + org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode result = org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode.valueOf(transitionCode_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode.OPENED : result; } - // repeated .hbase.pb.RegionInfo region_info = 2; public static final int REGION_INFO_FIELD_NUMBER = 2; private java.util.List regionInfo_; /** - * repeated .hbase.pb.RegionInfo region_info = 2; - * *
      ** Mutliple regions are involved during merging/splitting 
      * 
+ * + * repeated .hbase.pb.RegionInfo region_info = 2; */ public java.util.List getRegionInfoList() { return regionInfo_; } /** - * repeated .hbase.pb.RegionInfo region_info = 2; - * *
      ** Mutliple regions are involved during merging/splitting 
      * 
+ * + * repeated .hbase.pb.RegionInfo region_info = 2; */ public java.util.List getRegionInfoOrBuilderList() { return regionInfo_; } /** - * repeated .hbase.pb.RegionInfo region_info = 2; - * *
      ** Mutliple regions are involved during merging/splitting 
      * 
+ * + * repeated .hbase.pb.RegionInfo region_info = 2; */ public int getRegionInfoCount() { return regionInfo_.size(); } /** - * repeated .hbase.pb.RegionInfo region_info = 2; - * *
      ** Mutliple regions are involved during merging/splitting 
      * 
+ * + * repeated .hbase.pb.RegionInfo region_info = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) { return regionInfo_.get(index); } /** - * repeated .hbase.pb.RegionInfo region_info = 2; - * *
      ** Mutliple regions are involved during merging/splitting 
      * 
+ * + * repeated .hbase.pb.RegionInfo region_info = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder( int index) { return regionInfo_.get(index); } - // optional uint64 open_seq_num = 3; public static final int OPEN_SEQ_NUM_FIELD_NUMBER = 3; private long openSeqNum_; /** - * optional uint64 open_seq_num = 3; - * *
      ** For newly opened region, the open seq num is needed 
      * 
+ * + * optional uint64 open_seq_num = 3; */ public boolean hasOpenSeqNum() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional uint64 open_seq_num = 3; - * *
      ** For newly opened region, the open seq num is needed 
      * 
+ * + * optional uint64 open_seq_num = 3; */ public long getOpenSeqNum() { return openSeqNum_; } - private void initFields() { - transitionCode_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode.OPENED; - regionInfo_ = java.util.Collections.emptyList(); - openSeqNum_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasTransitionCode()) { memoizedIsInitialized = 0; @@ -6162,9 +6395,8 @@ public final class RegionServerStatusProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeEnum(1, transitionCode_.getNumber()); + output.writeEnum(1, transitionCode_); } for (int i = 0; i < regionInfo_.size(); i++) { output.writeMessage(2, regionInfo_.get(i)); @@ -6172,18 +6404,17 @@ public final class RegionServerStatusProtos { if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt64(3, openSeqNum_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(1, transitionCode_.getNumber()); + .computeEnumSize(1, transitionCode_); } for (int i = 0; i < regionInfo_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -6193,19 +6424,13 @@ public final class RegionServerStatusProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(3, openSeqNum_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = 
size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -6218,8 +6443,7 @@ public final class RegionServerStatusProtos { boolean result = true; result = result && (hasTransitionCode() == other.hasTransitionCode()); if (hasTransitionCode()) { - result = result && - (getTransitionCode() == other.getTransitionCode()); + result = result && transitionCode_ == other.transitionCode_; } result = result && getRegionInfoList() .equals(other.getRegionInfoList()); @@ -6228,12 +6452,10 @@ public final class RegionServerStatusProtos { result = result && (getOpenSeqNum() == other.getOpenSeqNum()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -6243,7 +6465,7 @@ public final class RegionServerStatusProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTransitionCode()) { hash = (37 * hash) + TRANSITION_CODE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getTransitionCode()); + hash = (53 * hash) + transitionCode_; } if (getRegionInfoCount() > 0) { hash = (37 * hash) + REGION_INFO_FIELD_NUMBER; @@ -6251,9 +6473,10 @@ public final class RegionServerStatusProtos { } if (hasOpenSeqNum()) { hash = (37 * hash) + OPEN_SEQ_NUM_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getOpenSeqNum()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getOpenSeqNum()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -6281,46 
+6504,57 @@ public final class RegionServerStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition parseFrom( com.google.protobuf.CodedInputStream 
input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -6328,14 +6562,15 @@ public final class RegionServerStatusProtos { * Protobuf type {@code hbase.pb.RegionStateTransition} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransitionOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RegionStateTransition) + org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransitionOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionStateTransition_descriptor; } - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionStateTransition_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -6348,22 +6583,19 @@ public final class RegionServerStatusProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionInfoFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); - transitionCode_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode.OPENED; + transitionCode_ = 0; bitField0_ = (bitField0_ & ~0x00000001); if (regionInfoBuilder_ == null) { regionInfo_ = java.util.Collections.emptyList(); @@ -6376,10 +6608,6 @@ public final class RegionServerStatusProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionStateTransition_descriptor; @@ -6423,6 +6651,32 @@ public final class RegionServerStatusProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) 
{ + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition)other); @@ -6456,7 +6710,7 @@ public final class RegionServerStatusProtos { regionInfo_ = other.regionInfo_; bitField0_ = (bitField0_ & ~0x00000002); regionInfoBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getRegionInfoFieldBuilder() : null; } else { regionInfoBuilder_.addAllMessages(other.regionInfo_); @@ -6466,18 +6720,17 @@ public final class RegionServerStatusProtos { if (other.hasOpenSeqNum()) { setOpenSeqNum(other.getOpenSeqNum()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasTransitionCode()) { - return false; } for (int i = 0; i < getRegionInfoCount(); i++) { if (!getRegionInfo(i).isInitialized()) { - return false; } } @@ -6493,7 +6746,7 @@ public final class RegionServerStatusProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -6503,8 +6756,7 @@ public final class RegionServerStatusProtos { } private int bitField0_; - // required .hbase.pb.RegionStateTransition.TransitionCode transition_code = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode transitionCode_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode.OPENED; + private int transitionCode_ = 0; /** * required .hbase.pb.RegionStateTransition.TransitionCode transition_code = 1; */ @@ -6515,7 +6767,8 @@ public final class RegionServerStatusProtos { * required .hbase.pb.RegionStateTransition.TransitionCode transition_code = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode getTransitionCode() { - return transitionCode_; + 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode result = org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode.valueOf(transitionCode_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode.OPENED : result; } /** * required .hbase.pb.RegionStateTransition.TransitionCode transition_code = 1; @@ -6525,7 +6778,7 @@ public final class RegionServerStatusProtos { throw new NullPointerException(); } bitField0_ |= 0x00000001; - transitionCode_ = value; + transitionCode_ = value.getNumber(); onChanged(); return this; } @@ -6534,12 +6787,11 @@ public final class RegionServerStatusProtos { */ public Builder clearTransitionCode() { bitField0_ = (bitField0_ & ~0x00000001); - transitionCode_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode.OPENED; + transitionCode_ = 0; onChanged(); return this; } - // repeated .hbase.pb.RegionInfo region_info = 2; private java.util.List regionInfo_ = java.util.Collections.emptyList(); private void ensureRegionInfoIsMutable() { @@ -6549,15 +6801,15 @@ public final class RegionServerStatusProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_; /** - * repeated .hbase.pb.RegionInfo region_info = 2; - * *
        ** Mutliple regions are involved during merging/splitting 
        * 
+ * + * repeated .hbase.pb.RegionInfo region_info = 2; */ public java.util.List getRegionInfoList() { if (regionInfoBuilder_ == null) { @@ -6567,11 +6819,11 @@ public final class RegionServerStatusProtos { } } /** - * repeated .hbase.pb.RegionInfo region_info = 2; - * *
        ** Mutliple regions are involved during merging/splitting 
        * 
+ * + * repeated .hbase.pb.RegionInfo region_info = 2; */ public int getRegionInfoCount() { if (regionInfoBuilder_ == null) { @@ -6581,11 +6833,11 @@ public final class RegionServerStatusProtos { } } /** - * repeated .hbase.pb.RegionInfo region_info = 2; - * *
        ** Mutliple regions are involved during merging/splitting 
        * 
+ * + * repeated .hbase.pb.RegionInfo region_info = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) { if (regionInfoBuilder_ == null) { @@ -6595,11 +6847,11 @@ public final class RegionServerStatusProtos { } } /** - * repeated .hbase.pb.RegionInfo region_info = 2; - * *
        ** Mutliple regions are involved during merging/splitting 
        * 
+ * + * repeated .hbase.pb.RegionInfo region_info = 2; */ public Builder setRegionInfo( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo value) { @@ -6616,11 +6868,11 @@ public final class RegionServerStatusProtos { return this; } /** - * repeated .hbase.pb.RegionInfo region_info = 2; - * *
        ** Mutliple regions are involved during merging/splitting 
        * 
+ * + * repeated .hbase.pb.RegionInfo region_info = 2; */ public Builder setRegionInfo( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { @@ -6634,11 +6886,11 @@ public final class RegionServerStatusProtos { return this; } /** - * repeated .hbase.pb.RegionInfo region_info = 2; - * *
        ** Mutliple regions are involved during merging/splitting 
        * 
+ * + * repeated .hbase.pb.RegionInfo region_info = 2; */ public Builder addRegionInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo value) { if (regionInfoBuilder_ == null) { @@ -6654,11 +6906,11 @@ public final class RegionServerStatusProtos { return this; } /** - * repeated .hbase.pb.RegionInfo region_info = 2; - * *
        ** Mutliple regions are involved during merging/splitting 
        * 
+ * + * repeated .hbase.pb.RegionInfo region_info = 2; */ public Builder addRegionInfo( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo value) { @@ -6675,11 +6927,11 @@ public final class RegionServerStatusProtos { return this; } /** - * repeated .hbase.pb.RegionInfo region_info = 2; - * *
        ** Mutliple regions are involved during merging/splitting 
        * 
+ * + * repeated .hbase.pb.RegionInfo region_info = 2; */ public Builder addRegionInfo( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { @@ -6693,11 +6945,11 @@ public final class RegionServerStatusProtos { return this; } /** - * repeated .hbase.pb.RegionInfo region_info = 2; - * *
        ** Mutliple regions are involved during merging/splitting 
        * 
+ * + * repeated .hbase.pb.RegionInfo region_info = 2; */ public Builder addRegionInfo( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { @@ -6711,17 +6963,18 @@ public final class RegionServerStatusProtos { return this; } /** - * repeated .hbase.pb.RegionInfo region_info = 2; - * *
        ** Mutliple regions are involved during merging/splitting 
        * 
+ * + * repeated .hbase.pb.RegionInfo region_info = 2; */ public Builder addAllRegionInfo( java.lang.Iterable values) { if (regionInfoBuilder_ == null) { ensureRegionInfoIsMutable(); - super.addAll(values, regionInfo_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, regionInfo_); onChanged(); } else { regionInfoBuilder_.addAllMessages(values); @@ -6729,11 +6982,11 @@ public final class RegionServerStatusProtos { return this; } /** - * repeated .hbase.pb.RegionInfo region_info = 2; - * *
        ** Mutliple regions are involved during merging/splitting 
        * 
+ * + * repeated .hbase.pb.RegionInfo region_info = 2; */ public Builder clearRegionInfo() { if (regionInfoBuilder_ == null) { @@ -6746,11 +6999,11 @@ public final class RegionServerStatusProtos { return this; } /** - * repeated .hbase.pb.RegionInfo region_info = 2; - * *
        ** Mutliple regions are involved during merging/splitting 
        * 
+ * + * repeated .hbase.pb.RegionInfo region_info = 2; */ public Builder removeRegionInfo(int index) { if (regionInfoBuilder_ == null) { @@ -6763,22 +7016,22 @@ public final class RegionServerStatusProtos { return this; } /** - * repeated .hbase.pb.RegionInfo region_info = 2; - * *
        ** Mutliple regions are involved during merging/splitting 
        * 
+ * + * repeated .hbase.pb.RegionInfo region_info = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder( int index) { return getRegionInfoFieldBuilder().getBuilder(index); } /** - * repeated .hbase.pb.RegionInfo region_info = 2; - * *
        ** Mutliple regions are involved during merging/splitting 
        * 
+ * + * repeated .hbase.pb.RegionInfo region_info = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder( int index) { @@ -6788,11 +7041,11 @@ public final class RegionServerStatusProtos { } } /** - * repeated .hbase.pb.RegionInfo region_info = 2; - * *
        ** Mutliple regions are involved during merging/splitting 
        * 
+ * + * repeated .hbase.pb.RegionInfo region_info = 2; */ public java.util.List getRegionInfoOrBuilderList() { @@ -6803,22 +7056,22 @@ public final class RegionServerStatusProtos { } } /** - * repeated .hbase.pb.RegionInfo region_info = 2; - * *
        ** Mutliple regions are involved during merging/splitting 
        * 
+ * + * repeated .hbase.pb.RegionInfo region_info = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder() { return getRegionInfoFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()); } /** - * repeated .hbase.pb.RegionInfo region_info = 2; - * *
        ** Mutliple regions are involved during merging/splitting 
        * 
+ * + * repeated .hbase.pb.RegionInfo region_info = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder( int index) { @@ -6826,21 +7079,21 @@ public final class RegionServerStatusProtos { index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()); } /** - * repeated .hbase.pb.RegionInfo region_info = 2; - * *
        ** Mutliple regions are involved during merging/splitting 
        * 
+ * + * repeated .hbase.pb.RegionInfo region_info = 2; */ public java.util.List getRegionInfoBuilderList() { return getRegionInfoFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> getRegionInfoFieldBuilder() { if (regionInfoBuilder_ == null) { - regionInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + regionInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( regionInfo_, ((bitField0_ & 0x00000002) == 0x00000002), @@ -6851,34 +7104,33 @@ public final class RegionServerStatusProtos { return regionInfoBuilder_; } - // optional uint64 open_seq_num = 3; private long openSeqNum_ ; /** - * optional uint64 open_seq_num = 3; - * *
        ** For newly opened region, the open seq num is needed 
        * 
+ * + * optional uint64 open_seq_num = 3; */ public boolean hasOpenSeqNum() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * optional uint64 open_seq_num = 3; - * *
        ** For newly opened region, the open seq num is needed 
        * 
+ * + * optional uint64 open_seq_num = 3; */ public long getOpenSeqNum() { return openSeqNum_; } /** - * optional uint64 open_seq_num = 3; - * *
        ** For newly opened region, the open seq num is needed 
        * 
+ * + * optional uint64 open_seq_num = 3; */ public Builder setOpenSeqNum(long value) { bitField0_ |= 0x00000004; @@ -6887,11 +7139,11 @@ public final class RegionServerStatusProtos { return this; } /** - * optional uint64 open_seq_num = 3; - * *
        ** For newly opened region, the open seq num is needed 
        * 
+ * + * optional uint64 open_seq_num = 3; */ public Builder clearOpenSeqNum() { bitField0_ = (bitField0_ & ~0x00000004); @@ -6899,48 +7151,84 @@ public final class RegionServerStatusProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RegionStateTransition) } + // @@protoc_insertion_point(class_scope:hbase.pb.RegionStateTransition) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition DEFAULT_INSTANCE; static { - defaultInstance = new RegionStateTransition(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RegionStateTransition parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionStateTransition(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // 
@@protoc_insertion_point(class_scope:hbase.pb.RegionStateTransition) } - public interface ReportRegionStateTransitionRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ReportRegionStateTransitionRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ReportRegionStateTransitionRequest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.ServerName server = 1; /** - * required .hbase.pb.ServerName server = 1; - * *
      ** This region server's server name 
      * 
+ * + * required .hbase.pb.ServerName server = 1; */ boolean hasServer(); /** - * required .hbase.pb.ServerName server = 1; - * *
      ** This region server's server name 
      * 
+ * + * required .hbase.pb.ServerName server = 1; */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServer(); /** - * required .hbase.pb.ServerName server = 1; - * *
      ** This region server's server name 
      * 
+ * + * required .hbase.pb.ServerName server = 1; */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder(); - // repeated .hbase.pb.RegionStateTransition transition = 2; /** * repeated .hbase.pb.RegionStateTransition transition = 2; */ @@ -6968,36 +7256,28 @@ public final class RegionServerStatusProtos { /** * Protobuf type {@code hbase.pb.ReportRegionStateTransitionRequest} */ - public static final class ReportRegionStateTransitionRequest extends - com.google.protobuf.GeneratedMessage - implements ReportRegionStateTransitionRequestOrBuilder { + public static final class ReportRegionStateTransitionRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ReportRegionStateTransitionRequest) + ReportRegionStateTransitionRequestOrBuilder { // Use ReportRegionStateTransitionRequest.newBuilder() to construct. - private ReportRegionStateTransitionRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private ReportRegionStateTransitionRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ReportRegionStateTransitionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ReportRegionStateTransitionRequest defaultInstance; - public static ReportRegionStateTransitionRequest getDefaultInstance() { - return defaultInstance; } - - public ReportRegionStateTransitionRequest getDefaultInstanceForType() { - return defaultInstance; + private ReportRegionStateTransitionRequest() { + transition_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ReportRegionStateTransitionRequest( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -7034,7 +7314,8 @@ public final class RegionServerStatusProtos { transition_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } - transition_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.PARSER, extensionRegistry)); + transition_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.PARSER, extensionRegistry)); break; } } @@ -7043,7 +7324,7 @@ public final class RegionServerStatusProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { transition_ = java.util.Collections.unmodifiableList(transition_); @@ -7057,64 +7338,47 @@ public final class RegionServerStatusProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRegionStateTransitionRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRegionStateTransitionRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest.class, 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ReportRegionStateTransitionRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ReportRegionStateTransitionRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRegionStateTransitionRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest.Builder.class); + } + private int bitField0_; - // required .hbase.pb.ServerName server = 1; public static final int SERVER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName server_; /** - * required .hbase.pb.ServerName server = 1; - * *
      ** This region server's server name 
      * 
+ * + * required .hbase.pb.ServerName server = 1; */ public boolean hasServer() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required .hbase.pb.ServerName server = 1; - * *
      ** This region server's server name 
      * 
+ * + * required .hbase.pb.ServerName server = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServer() { - return server_; + return server_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } /** - * required .hbase.pb.ServerName server = 1; - * *
      ** This region server's server name 
      * 
+ * + * required .hbase.pb.ServerName server = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { - return server_; + return server_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } - // repeated .hbase.pb.RegionStateTransition transition = 2; public static final int TRANSITION_FIELD_NUMBER = 2; private java.util.List transition_; /** @@ -7150,14 +7414,11 @@ public final class RegionServerStatusProtos { return transition_.get(index); } - private void initFields() { - server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - transition_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasServer()) { memoizedIsInitialized = 0; @@ -7179,43 +7440,35 @@ public final class RegionServerStatusProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, server_); + output.writeMessage(1, getServer()); } for (int i = 0; i < transition_.size(); i++) { output.writeMessage(2, transition_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, server_); + .computeMessageSize(1, getServer()); } for (int i = 0; i < transition_.size(); i++) { size += com.google.protobuf.CodedOutputStream 
.computeMessageSize(2, transition_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -7233,12 +7486,10 @@ public final class RegionServerStatusProtos { } result = result && getTransitionList() .equals(other.getTransitionList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -7254,7 +7505,7 @@ public final class RegionServerStatusProtos { hash = (37 * hash) + TRANSITION_FIELD_NUMBER; hash = (53 * hash) + getTransitionList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -7282,46 +7533,57 @@ public final class RegionServerStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest prototype) { - return 
newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -7329,14 +7591,15 @@ public final class RegionServerStatusProtos { * Protobuf type {@code hbase.pb.ReportRegionStateTransitionRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ReportRegionStateTransitionRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRegionStateTransitionRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRegionStateTransitionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -7349,24 +7612,21 @@ public final class RegionServerStatusProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); 
maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getServerFieldBuilder(); getTransitionFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (serverBuilder_ == null) { - server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + server_ = null; } else { serverBuilder_.clear(); } @@ -7380,10 +7640,6 @@ public final class RegionServerStatusProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRegionStateTransitionRequest_descriptor; @@ -7427,6 +7683,32 @@ public final class RegionServerStatusProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest)other); @@ -7460,29 +7742,27 @@ public final class RegionServerStatusProtos { transition_ = other.transition_; bitField0_ = (bitField0_ & ~0x00000002); transitionBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getTransitionFieldBuilder() : null; } else { transitionBuilder_.addAllMessages(other.transition_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasServer()) { - return false; } if (!getServer().isInitialized()) { - return false; } for (int i = 0; i < getTransitionCount(); i++) { if (!getTransition(i).isInitialized()) { - return false; } } @@ -7498,7 +7778,7 @@ public final class RegionServerStatusProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -7508,40 +7788,39 @@ public final class RegionServerStatusProtos { } private int bitField0_; - // required .hbase.pb.ServerName server = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName server_ = null; + private 
com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_; /** - * required .hbase.pb.ServerName server = 1; - * *
        ** This region server's server name 
        * 
+ * + * required .hbase.pb.ServerName server = 1; */ public boolean hasServer() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required .hbase.pb.ServerName server = 1; - * *
        ** This region server's server name 
        * 
+ * + * required .hbase.pb.ServerName server = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServer() { if (serverBuilder_ == null) { - return server_; + return server_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } else { return serverBuilder_.getMessage(); } } /** - * required .hbase.pb.ServerName server = 1; - * *
        ** This region server's server name 
        * 
+ * + * required .hbase.pb.ServerName server = 1; */ public Builder setServer(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (serverBuilder_ == null) { @@ -7557,11 +7836,11 @@ public final class RegionServerStatusProtos { return this; } /** - * required .hbase.pb.ServerName server = 1; - * *
        ** This region server's server name 
        * 
+ * + * required .hbase.pb.ServerName server = 1; */ public Builder setServer( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { @@ -7575,15 +7854,16 @@ public final class RegionServerStatusProtos { return this; } /** - * required .hbase.pb.ServerName server = 1; - * *
        ** This region server's server name 
        * 
+ * + * required .hbase.pb.ServerName server = 1; */ public Builder mergeServer(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (serverBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + server_ != null && server_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.newBuilder(server_).mergeFrom(value).buildPartial(); @@ -7598,15 +7878,15 @@ public final class RegionServerStatusProtos { return this; } /** - * required .hbase.pb.ServerName server = 1; - * *
        ** This region server's server name 
        * 
+ * + * required .hbase.pb.ServerName server = 1; */ public Builder clearServer() { if (serverBuilder_ == null) { - server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + server_ = null; onChanged(); } else { serverBuilder_.clear(); @@ -7615,11 +7895,11 @@ public final class RegionServerStatusProtos { return this; } /** - * required .hbase.pb.ServerName server = 1; - * *
        ** This region server's server name 
        * 
+ * + * required .hbase.pb.ServerName server = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder getServerBuilder() { bitField0_ |= 0x00000001; @@ -7627,33 +7907,34 @@ public final class RegionServerStatusProtos { return getServerFieldBuilder().getBuilder(); } /** - * required .hbase.pb.ServerName server = 1; - * *
        ** This region server's server name 
        * 
+ * + * required .hbase.pb.ServerName server = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { if (serverBuilder_ != null) { return serverBuilder_.getMessageOrBuilder(); } else { - return server_; + return server_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } } /** - * required .hbase.pb.ServerName server = 1; - * *
        ** This region server's server name 
        * 
+ * + * required .hbase.pb.ServerName server = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getServerFieldBuilder() { if (serverBuilder_ == null) { - serverBuilder_ = new com.google.protobuf.SingleFieldBuilder< + serverBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( - server_, + getServer(), getParentForChildren(), isClean()); server_ = null; @@ -7661,7 +7942,6 @@ public final class RegionServerStatusProtos { return serverBuilder_; } - // repeated .hbase.pb.RegionStateTransition transition = 2; private java.util.List transition_ = java.util.Collections.emptyList(); private void ensureTransitionIsMutable() { @@ -7671,7 +7951,7 @@ public final class RegionServerStatusProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition, org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransitionOrBuilder> transitionBuilder_; /** @@ -7803,7 +8083,8 @@ public final class RegionServerStatusProtos { java.lang.Iterable values) { if (transitionBuilder_ == null) { ensureTransitionIsMutable(); - super.addAll(values, transition_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, transition_); onChanged(); } else { 
transitionBuilder_.addAllMessages(values); @@ -7886,11 +8167,11 @@ public final class RegionServerStatusProtos { getTransitionBuilderList() { return getTransitionFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition, org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransitionOrBuilder> getTransitionFieldBuilder() { if (transitionBuilder_ == null) { - transitionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + transitionBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition, org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransitionOrBuilder>( transition_, ((bitField0_ & 0x00000002) == 0x00000002), @@ -7900,44 +8181,81 @@ public final class RegionServerStatusProtos { } return transitionBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ReportRegionStateTransitionRequest) } + // @@protoc_insertion_point(class_scope:hbase.pb.ReportRegionStateTransitionRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest DEFAULT_INSTANCE; static { - defaultInstance = new 
ReportRegionStateTransitionRequest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ReportRegionStateTransitionRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ReportRegionStateTransitionRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ReportRegionStateTransitionRequest) } - public interface ReportRegionStateTransitionResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ReportRegionStateTransitionResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ReportRegionStateTransitionResponse) + com.google.protobuf.MessageOrBuilder { - // optional string error_message = 1; /** - * optional string error_message = 1; - * *
      ** Error message if failed to update the region state 
      * 
+ * + * optional string error_message = 1; */ boolean hasErrorMessage(); /** - * optional string error_message = 1; - * *
      ** Error message if failed to update the region state 
      * 
+ * + * optional string error_message = 1; */ java.lang.String getErrorMessage(); /** - * optional string error_message = 1; - * *
      ** Error message if failed to update the region state 
      * 
+ * + * optional string error_message = 1; */ com.google.protobuf.ByteString getErrorMessageBytes(); @@ -7945,36 +8263,28 @@ public final class RegionServerStatusProtos { /** * Protobuf type {@code hbase.pb.ReportRegionStateTransitionResponse} */ - public static final class ReportRegionStateTransitionResponse extends - com.google.protobuf.GeneratedMessage - implements ReportRegionStateTransitionResponseOrBuilder { + public static final class ReportRegionStateTransitionResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ReportRegionStateTransitionResponse) + ReportRegionStateTransitionResponseOrBuilder { // Use ReportRegionStateTransitionResponse.newBuilder() to construct. - private ReportRegionStateTransitionResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private ReportRegionStateTransitionResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ReportRegionStateTransitionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ReportRegionStateTransitionResponse defaultInstance; - public static ReportRegionStateTransitionResponse getDefaultInstance() { - return defaultInstance; } - - public ReportRegionStateTransitionResponse getDefaultInstanceForType() { - return defaultInstance; + private ReportRegionStateTransitionResponse() { + errorMessage_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ReportRegionStateTransitionResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int 
mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -7994,8 +8304,9 @@ public final class RegionServerStatusProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - errorMessage_ = input.readBytes(); + errorMessage_ = bs; break; } } @@ -8004,7 +8315,7 @@ public final class RegionServerStatusProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -8015,48 +8326,32 @@ public final class RegionServerStatusProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRegionStateTransitionResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRegionStateTransitionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ReportRegionStateTransitionResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ReportRegionStateTransitionResponse(input, extensionRegistry); - } - }; - - 
@java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional string error_message = 1; public static final int ERROR_MESSAGE_FIELD_NUMBER = 1; - private java.lang.Object errorMessage_; + private volatile java.lang.Object errorMessage_; /** - * optional string error_message = 1; - * *
      ** Error message if failed to update the region state 
      * 
+ * + * optional string error_message = 1; */ public boolean hasErrorMessage() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional string error_message = 1; - * *
      ** Error message if failed to update the region state 
      * 
+ * + * optional string error_message = 1; */ public java.lang.String getErrorMessage() { java.lang.Object ref = errorMessage_; @@ -8073,11 +8368,11 @@ public final class RegionServerStatusProtos { } } /** - * optional string error_message = 1; - * *
      ** Error message if failed to update the region state 
      * 
+ * + * optional string error_message = 1; */ public com.google.protobuf.ByteString getErrorMessageBytes() { @@ -8093,13 +8388,11 @@ public final class RegionServerStatusProtos { } } - private void initFields() { - errorMessage_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -8107,36 +8400,27 @@ public final class RegionServerStatusProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getErrorMessageBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, errorMessage_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getErrorMessageBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, errorMessage_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -8152,12 +8436,10 @@ public final class RegionServerStatusProtos { result = result && getErrorMessage() .equals(other.getErrorMessage()); } - 
result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -8169,7 +8451,7 @@ public final class RegionServerStatusProtos { hash = (37 * hash) + ERROR_MESSAGE_FIELD_NUMBER; hash = (53 * hash) + getErrorMessage().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -8197,46 +8479,57 @@ public final class RegionServerStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return 
PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -8244,14 +8537,15 @@ public final class RegionServerStatusProtos { * Protobuf type {@code hbase.pb.ReportRegionStateTransitionResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponseOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ReportRegionStateTransitionResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRegionStateTransitionResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRegionStateTransitionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -8264,18 +8558,15 @@ public final class RegionServerStatusProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if 
(com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); errorMessage_ = ""; @@ -8283,10 +8574,6 @@ public final class RegionServerStatusProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_ReportRegionStateTransitionResponse_descriptor; @@ -8317,6 +8604,32 @@ public final class RegionServerStatusProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse)other); @@ -8333,7 +8646,8 @@ public final class RegionServerStatusProtos { errorMessage_ = other.errorMessage_; onChanged(); } - 
this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -8350,7 +8664,7 @@ public final class RegionServerStatusProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -8360,42 +8674,44 @@ public final class RegionServerStatusProtos { } private int bitField0_; - // optional string error_message = 1; private java.lang.Object errorMessage_ = ""; /** - * optional string error_message = 1; - * *
        ** Error message if failed to update the region state 
        * 
+ * + * optional string error_message = 1; */ public boolean hasErrorMessage() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional string error_message = 1; - * *
        ** Error message if failed to update the region state 
        * 
+ * + * optional string error_message = 1; */ public java.lang.String getErrorMessage() { java.lang.Object ref = errorMessage_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - errorMessage_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + errorMessage_ = s; + } return s; } else { return (java.lang.String) ref; } } /** - * optional string error_message = 1; - * *
        ** Error message if failed to update the region state 
        * 
+ * + * optional string error_message = 1; */ public com.google.protobuf.ByteString getErrorMessageBytes() { @@ -8411,11 +8727,11 @@ public final class RegionServerStatusProtos { } } /** - * optional string error_message = 1; - * *
        ** Error message if failed to update the region state 
        * 
+ * + * optional string error_message = 1; */ public Builder setErrorMessage( java.lang.String value) { @@ -8428,11 +8744,11 @@ public final class RegionServerStatusProtos { return this; } /** - * optional string error_message = 1; - * *
        ** Error message if failed to update the region state 
        * 
+ * + * optional string error_message = 1; */ public Builder clearErrorMessage() { bitField0_ = (bitField0_ & ~0x00000001); @@ -8441,11 +8757,11 @@ public final class RegionServerStatusProtos { return this; } /** - * optional string error_message = 1; - * *
        ** Error message if failed to update the region state 
        * 
+ * + * optional string error_message = 1; */ public Builder setErrorMessageBytes( com.google.protobuf.ByteString value) { @@ -8457,16 +8773,53 @@ public final class RegionServerStatusProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ReportRegionStateTransitionResponse) } + // @@protoc_insertion_point(class_scope:hbase.pb.ReportRegionStateTransitionResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse DEFAULT_INSTANCE; static { - defaultInstance = new ReportRegionStateTransitionResponse(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ReportRegionStateTransitionResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ReportRegionStateTransitionResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ReportRegionStateTransitionResponse) } /** @@ -8478,11 +8831,11 @@ public final class RegionServerStatusProtos { public interface Interface { /** - * rpc RegionServerStartup(.hbase.pb.RegionServerStartupRequest) returns (.hbase.pb.RegionServerStartupResponse); - * *
        ** Called when a region server first starts. 
        * 
+ * + * rpc RegionServerStartup(.hbase.pb.RegionServerStartupRequest) returns (.hbase.pb.RegionServerStartupResponse); */ public abstract void regionServerStartup( com.google.protobuf.RpcController controller, @@ -8490,11 +8843,11 @@ public final class RegionServerStatusProtos { com.google.protobuf.RpcCallback done); /** - * rpc RegionServerReport(.hbase.pb.RegionServerReportRequest) returns (.hbase.pb.RegionServerReportResponse); - * *
        ** Called to report the load the RegionServer is under. 
        * 
+ * + * rpc RegionServerReport(.hbase.pb.RegionServerReportRequest) returns (.hbase.pb.RegionServerReportResponse); */ public abstract void regionServerReport( com.google.protobuf.RpcController controller, @@ -8502,13 +8855,13 @@ public final class RegionServerStatusProtos { com.google.protobuf.RpcCallback done); /** - * rpc ReportRSFatalError(.hbase.pb.ReportRSFatalErrorRequest) returns (.hbase.pb.ReportRSFatalErrorResponse); - * *
        **
        * Called by a region server to report a fatal error that is causing it to
        * abort.
        * 
+ * + * rpc ReportRSFatalError(.hbase.pb.ReportRSFatalErrorRequest) returns (.hbase.pb.ReportRSFatalErrorResponse); */ public abstract void reportRSFatalError( com.google.protobuf.RpcController controller, @@ -8516,13 +8869,13 @@ public final class RegionServerStatusProtos { com.google.protobuf.RpcCallback done); /** - * rpc GetLastFlushedSequenceId(.hbase.pb.GetLastFlushedSequenceIdRequest) returns (.hbase.pb.GetLastFlushedSequenceIdResponse); - * *
        ** Called to get the sequence id of the last MemStore entry flushed to an
        * HFile for a specified region. Used by the region server to speed up
        * log splitting. 
        * 
+ * + * rpc GetLastFlushedSequenceId(.hbase.pb.GetLastFlushedSequenceIdRequest) returns (.hbase.pb.GetLastFlushedSequenceIdResponse); */ public abstract void getLastFlushedSequenceId( com.google.protobuf.RpcController controller, @@ -8530,14 +8883,14 @@ public final class RegionServerStatusProtos { com.google.protobuf.RpcCallback done); /** - * rpc ReportRegionStateTransition(.hbase.pb.ReportRegionStateTransitionRequest) returns (.hbase.pb.ReportRegionStateTransitionResponse); - * *
        **
        * Called by a region server to report the progress of a region
        * transition. If the request fails, the transition should
        * be aborted.
        * 
+ * + * rpc ReportRegionStateTransition(.hbase.pb.ReportRegionStateTransitionRequest) returns (.hbase.pb.ReportRegionStateTransitionResponse); */ public abstract void reportRegionStateTransition( com.google.protobuf.RpcController controller, @@ -8678,11 +9031,11 @@ public final class RegionServerStatusProtos { } /** - * rpc RegionServerStartup(.hbase.pb.RegionServerStartupRequest) returns (.hbase.pb.RegionServerStartupResponse); - * *
      ** Called when a region server first starts. 
      * 
+ * + * rpc RegionServerStartup(.hbase.pb.RegionServerStartupRequest) returns (.hbase.pb.RegionServerStartupResponse); */ public abstract void regionServerStartup( com.google.protobuf.RpcController controller, @@ -8690,11 +9043,11 @@ public final class RegionServerStatusProtos { com.google.protobuf.RpcCallback done); /** - * rpc RegionServerReport(.hbase.pb.RegionServerReportRequest) returns (.hbase.pb.RegionServerReportResponse); - * *
      ** Called to report the load the RegionServer is under. 
      * 
+ * + * rpc RegionServerReport(.hbase.pb.RegionServerReportRequest) returns (.hbase.pb.RegionServerReportResponse); */ public abstract void regionServerReport( com.google.protobuf.RpcController controller, @@ -8702,13 +9055,13 @@ public final class RegionServerStatusProtos { com.google.protobuf.RpcCallback done); /** - * rpc ReportRSFatalError(.hbase.pb.ReportRSFatalErrorRequest) returns (.hbase.pb.ReportRSFatalErrorResponse); - * *
      **
      * Called by a region server to report a fatal error that is causing it to
      * abort.
      * 
+ * + * rpc ReportRSFatalError(.hbase.pb.ReportRSFatalErrorRequest) returns (.hbase.pb.ReportRSFatalErrorResponse); */ public abstract void reportRSFatalError( com.google.protobuf.RpcController controller, @@ -8716,13 +9069,13 @@ public final class RegionServerStatusProtos { com.google.protobuf.RpcCallback done); /** - * rpc GetLastFlushedSequenceId(.hbase.pb.GetLastFlushedSequenceIdRequest) returns (.hbase.pb.GetLastFlushedSequenceIdResponse); - * *
      ** Called to get the sequence id of the last MemStore entry flushed to an
      * HFile for a specified region. Used by the region server to speed up
      * log splitting. 
      * 
+ * + * rpc GetLastFlushedSequenceId(.hbase.pb.GetLastFlushedSequenceIdRequest) returns (.hbase.pb.GetLastFlushedSequenceIdResponse); */ public abstract void getLastFlushedSequenceId( com.google.protobuf.RpcController controller, @@ -8730,14 +9083,14 @@ public final class RegionServerStatusProtos { com.google.protobuf.RpcCallback done); /** - * rpc ReportRegionStateTransition(.hbase.pb.ReportRegionStateTransitionRequest) returns (.hbase.pb.ReportRegionStateTransitionResponse); - * *
      **
      * Called by a region server to report the progress of a region
      * transition. If the request fails, the transition should
      * be aborted.
      * 
+ * + * rpc ReportRegionStateTransition(.hbase.pb.ReportRegionStateTransitionRequest) returns (.hbase.pb.ReportRegionStateTransitionResponse); */ public abstract void reportRegionStateTransition( com.google.protobuf.RpcController controller, @@ -9039,67 +9392,67 @@ public final class RegionServerStatusProtos { // @@protoc_insertion_point(class_scope:hbase.pb.RegionServerStatusService) } - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RegionServerStartupRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RegionServerStartupRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RegionServerStartupResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RegionServerStartupResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RegionServerReportRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RegionServerReportRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RegionServerReportResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internal_static_hbase_pb_RegionServerReportResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ReportRSFatalErrorRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ReportRSFatalErrorRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ReportRSFatalErrorResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ReportRSFatalErrorResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_GetLastFlushedSequenceIdRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_GetLastFlushedSequenceIdRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_GetLastFlushedSequenceIdResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_GetLastFlushedSequenceIdResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RegionStateTransition_descriptor; - private static - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RegionStateTransition_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ReportRegionStateTransitionRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ReportRegionStateTransitionRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ReportRegionStateTransitionResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ReportRegionStateTransitionResponse_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { @@ -9154,85 +9507,87 @@ public final class RegionServerStatusProtos { "tosH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_hbase_pb_RegionServerStartupRequest_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_hbase_pb_RegionServerStartupRequest_fieldAccessorTable = new - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RegionServerStartupRequest_descriptor, - new java.lang.String[] { "Port", "ServerStartCode", "ServerCurrentTime", "UseThisHostnameInstead", }); - internal_static_hbase_pb_RegionServerStartupResponse_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_hbase_pb_RegionServerStartupResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RegionServerStartupResponse_descriptor, - new java.lang.String[] { "MapEntries", }); - internal_static_hbase_pb_RegionServerReportRequest_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_hbase_pb_RegionServerReportRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RegionServerReportRequest_descriptor, - new java.lang.String[] { "Server", "Load", }); - internal_static_hbase_pb_RegionServerReportResponse_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_hbase_pb_RegionServerReportResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RegionServerReportResponse_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_ReportRSFatalErrorRequest_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_hbase_pb_ReportRSFatalErrorRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ReportRSFatalErrorRequest_descriptor, - new java.lang.String[] { "Server", "ErrorMessage", }); - internal_static_hbase_pb_ReportRSFatalErrorResponse_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_hbase_pb_ReportRSFatalErrorResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ReportRSFatalErrorResponse_descriptor, - new 
java.lang.String[] { }); - internal_static_hbase_pb_GetLastFlushedSequenceIdRequest_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_hbase_pb_GetLastFlushedSequenceIdRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_GetLastFlushedSequenceIdRequest_descriptor, - new java.lang.String[] { "RegionName", }); - internal_static_hbase_pb_GetLastFlushedSequenceIdResponse_descriptor = - getDescriptor().getMessageTypes().get(7); - internal_static_hbase_pb_GetLastFlushedSequenceIdResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_GetLastFlushedSequenceIdResponse_descriptor, - new java.lang.String[] { "LastFlushedSequenceId", "StoreLastFlushedSequenceId", }); - internal_static_hbase_pb_RegionStateTransition_descriptor = - getDescriptor().getMessageTypes().get(8); - internal_static_hbase_pb_RegionStateTransition_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RegionStateTransition_descriptor, - new java.lang.String[] { "TransitionCode", "RegionInfo", "OpenSeqNum", }); - internal_static_hbase_pb_ReportRegionStateTransitionRequest_descriptor = - getDescriptor().getMessageTypes().get(9); - internal_static_hbase_pb_ReportRegionStateTransitionRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ReportRegionStateTransitionRequest_descriptor, - new java.lang.String[] { "Server", "Transition", }); - internal_static_hbase_pb_ReportRegionStateTransitionResponse_descriptor = - getDescriptor().getMessageTypes().get(10); - internal_static_hbase_pb_ReportRegionStateTransitionResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ReportRegionStateTransitionResponse_descriptor, - new java.lang.String[] { "ErrorMessage", }); - return null; - } 
- }; + new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(), org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.getDescriptor(), }, assigner); + internal_static_hbase_pb_RegionServerStartupRequest_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_hbase_pb_RegionServerStartupRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_RegionServerStartupRequest_descriptor, + new java.lang.String[] { "Port", "ServerStartCode", "ServerCurrentTime", "UseThisHostnameInstead", }); + internal_static_hbase_pb_RegionServerStartupResponse_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_hbase_pb_RegionServerStartupResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_RegionServerStartupResponse_descriptor, + new java.lang.String[] { "MapEntries", }); + internal_static_hbase_pb_RegionServerReportRequest_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_hbase_pb_RegionServerReportRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_RegionServerReportRequest_descriptor, + new java.lang.String[] { "Server", "Load", }); + internal_static_hbase_pb_RegionServerReportResponse_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_hbase_pb_RegionServerReportResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + 
internal_static_hbase_pb_RegionServerReportResponse_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_ReportRSFatalErrorRequest_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_hbase_pb_ReportRSFatalErrorRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ReportRSFatalErrorRequest_descriptor, + new java.lang.String[] { "Server", "ErrorMessage", }); + internal_static_hbase_pb_ReportRSFatalErrorResponse_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_hbase_pb_ReportRSFatalErrorResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ReportRSFatalErrorResponse_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_GetLastFlushedSequenceIdRequest_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_hbase_pb_GetLastFlushedSequenceIdRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetLastFlushedSequenceIdRequest_descriptor, + new java.lang.String[] { "RegionName", }); + internal_static_hbase_pb_GetLastFlushedSequenceIdResponse_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_hbase_pb_GetLastFlushedSequenceIdResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetLastFlushedSequenceIdResponse_descriptor, + new java.lang.String[] { "LastFlushedSequenceId", "StoreLastFlushedSequenceId", }); + internal_static_hbase_pb_RegionStateTransition_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_hbase_pb_RegionStateTransition_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_RegionStateTransition_descriptor, + new java.lang.String[] { "TransitionCode", "RegionInfo", "OpenSeqNum", }); + 
internal_static_hbase_pb_ReportRegionStateTransitionRequest_descriptor = + getDescriptor().getMessageTypes().get(9); + internal_static_hbase_pb_ReportRegionStateTransitionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ReportRegionStateTransitionRequest_descriptor, + new java.lang.String[] { "Server", "Transition", }); + internal_static_hbase_pb_ReportRegionStateTransitionResponse_descriptor = + getDescriptor().getMessageTypes().get(10); + internal_static_hbase_pb_ReportRegionStateTransitionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ReportRegionStateTransitionResponse_descriptor, + new java.lang.String[] { "ErrorMessage", }); + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(); + org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/SnapshotProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/SnapshotProtos.java index bb09cde..4d3b294 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/SnapshotProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/SnapshotProtos.java @@ -6,12 +6,18 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated; public final class SnapshotProtos { private SnapshotProtos() {} public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); } - public interface SnapshotFileInfoOrBuilder - extends 
com.google.protobuf.MessageOrBuilder { + public interface SnapshotFileInfoOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SnapshotFileInfo) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.SnapshotFileInfo.Type type = 1; /** * required .hbase.pb.SnapshotFileInfo.Type type = 1; */ @@ -21,7 +27,6 @@ public final class SnapshotProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type getType(); - // optional string hfile = 3; /** * optional string hfile = 3; */ @@ -36,7 +41,6 @@ public final class SnapshotProtos { com.google.protobuf.ByteString getHfileBytes(); - // optional string wal_server = 4; /** * optional string wal_server = 4; */ @@ -51,7 +55,6 @@ public final class SnapshotProtos { com.google.protobuf.ByteString getWalServerBytes(); - // optional string wal_name = 5; /** * optional string wal_name = 5; */ @@ -69,36 +72,31 @@ public final class SnapshotProtos { /** * Protobuf type {@code hbase.pb.SnapshotFileInfo} */ - public static final class SnapshotFileInfo extends - com.google.protobuf.GeneratedMessage - implements SnapshotFileInfoOrBuilder { + public static final class SnapshotFileInfo extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.SnapshotFileInfo) + SnapshotFileInfoOrBuilder { // Use SnapshotFileInfo.newBuilder() to construct. 
- private SnapshotFileInfo(com.google.protobuf.GeneratedMessage.Builder builder) { + private SnapshotFileInfo(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private SnapshotFileInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SnapshotFileInfo defaultInstance; - public static SnapshotFileInfo getDefaultInstance() { - return defaultInstance; - } - - public SnapshotFileInfo getDefaultInstanceForType() { - return defaultInstance; + private SnapshotFileInfo() { + type_ = 1; + hfile_ = ""; + walServer_ = ""; + walName_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private SnapshotFileInfo( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -124,23 +122,26 @@ public final class SnapshotProtos { unknownFields.mergeVarintField(1, rawValue); } else { bitField0_ |= 0x00000001; - type_ = value; + type_ = rawValue; } break; } case 26: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; - hfile_ = input.readBytes(); + hfile_ = bs; break; } case 34: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000004; - walServer_ = input.readBytes(); + walServer_ = bs; break; } case 42: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000008; - walName_ = input.readBytes(); + walName_ = bs; break; } } @@ -149,7 +150,7 @@ public final class SnapshotProtos { throw e.setUnfinishedMessage(this); } catch 
(java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -160,28 +161,13 @@ public final class SnapshotProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotFileInfo_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotFileInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo.class, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SnapshotFileInfo parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SnapshotFileInfo(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - /** * Protobuf enum {@code hbase.pb.SnapshotFileInfo.Type} */ @@ -190,11 +176,11 @@ public final class SnapshotProtos { /** * HFILE = 1; */ - HFILE(0, 1), + HFILE(1), /** * WAL = 2; */ - WAL(1, 2), + WAL(2), ; /** @@ -207,9 +193,19 @@ public final class SnapshotProtos { public static final int WAL_VALUE = 2; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. 
+ */ + @java.lang.Deprecated public static Type valueOf(int value) { + return forNumber(value); + } + + public static Type forNumber(int value) { switch (value) { case 1: return HFILE; case 2: return WAL; @@ -221,17 +217,17 @@ public final class SnapshotProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + Type> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public Type findValueByNumber(int number) { - return Type.valueOf(number); + return Type.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -253,11 +249,9 @@ public final class SnapshotProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private Type(int index, int value) { - this.index = index; + private Type(int value) { this.value = value; } @@ -265,9 +259,8 @@ public final class SnapshotProtos { } private int bitField0_; - // required .hbase.pb.SnapshotFileInfo.Type type = 1; public static final int TYPE_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type type_; + private int type_; /** * required .hbase.pb.SnapshotFileInfo.Type type = 1; */ @@ -278,12 +271,12 @@ public final class SnapshotProtos { * required .hbase.pb.SnapshotFileInfo.Type type = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type getType() { - return type_; + org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type result = org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type.valueOf(type_); + return 
result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type.HFILE : result; } - // optional string hfile = 3; public static final int HFILE_FIELD_NUMBER = 3; - private java.lang.Object hfile_; + private volatile java.lang.Object hfile_; /** * optional string hfile = 3; */ @@ -324,9 +317,8 @@ public final class SnapshotProtos { } } - // optional string wal_server = 4; public static final int WAL_SERVER_FIELD_NUMBER = 4; - private java.lang.Object walServer_; + private volatile java.lang.Object walServer_; /** * optional string wal_server = 4; */ @@ -367,9 +359,8 @@ public final class SnapshotProtos { } } - // optional string wal_name = 5; public static final int WAL_NAME_FIELD_NUMBER = 5; - private java.lang.Object walName_; + private volatile java.lang.Object walName_; /** * optional string wal_name = 5; */ @@ -410,16 +401,11 @@ public final class SnapshotProtos { } } - private void initFields() { - type_ = org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type.HFILE; - hfile_ = ""; - walServer_ = ""; - walName_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasType()) { memoizedIsInitialized = 0; @@ -431,57 +417,46 @@ public final class SnapshotProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeEnum(1, type_.getNumber()); + output.writeEnum(1, type_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(3, getHfileBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, hfile_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(4, getWalServerBytes()); + 
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, walServer_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeBytes(5, getWalNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, walName_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(1, type_.getNumber()); + .computeEnumSize(1, type_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, getHfileBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, hfile_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(4, getWalServerBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, walServer_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(5, getWalNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, walName_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -494,8 +469,7 @@ public final class SnapshotProtos { boolean result = true; result = result && (hasType() == other.hasType()); if (hasType()) { - result = result && - (getType() == 
other.getType()); + result = result && type_ == other.type_; } result = result && (hasHfile() == other.hasHfile()); if (hasHfile()) { @@ -512,12 +486,10 @@ public final class SnapshotProtos { result = result && getWalName() .equals(other.getWalName()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -527,7 +499,7 @@ public final class SnapshotProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasType()) { hash = (37 * hash) + TYPE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getType()); + hash = (53 * hash) + type_; } if (hasHfile()) { hash = (37 * hash) + HFILE_FIELD_NUMBER; @@ -541,7 +513,7 @@ public final class SnapshotProtos { hash = (37 * hash) + WAL_NAME_FIELD_NUMBER; hash = (53 * hash) + getWalName().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -569,46 +541,57 @@ public final class SnapshotProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo parseDelimitedFrom(java.io.InputStream input) throws 
java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -616,14 +599,15 @@ public final class SnapshotProtos { * Protobuf type {@code hbase.pb.SnapshotFileInfo} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfoOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SnapshotFileInfo) + org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotFileInfo_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotFileInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -636,21 +620,18 @@ public final class SnapshotProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); - 
type_ = org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type.HFILE; + type_ = 1; bitField0_ = (bitField0_ & ~0x00000001); hfile_ = ""; bitField0_ = (bitField0_ & ~0x00000002); @@ -661,10 +642,6 @@ public final class SnapshotProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotFileInfo_descriptor; @@ -707,6 +684,32 @@ public final class SnapshotProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo)other); @@ -736,13 +739,13 @@ public final class SnapshotProtos { walName_ = other.walName_; onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } 
public final boolean isInitialized() { if (!hasType()) { - return false; } return true; @@ -757,7 +760,7 @@ public final class SnapshotProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -767,8 +770,7 @@ public final class SnapshotProtos { } private int bitField0_; - // required .hbase.pb.SnapshotFileInfo.Type type = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type type_ = org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type.HFILE; + private int type_ = 1; /** * required .hbase.pb.SnapshotFileInfo.Type type = 1; */ @@ -779,7 +781,8 @@ public final class SnapshotProtos { * required .hbase.pb.SnapshotFileInfo.Type type = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type getType() { - return type_; + org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type result = org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type.valueOf(type_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type.HFILE : result; } /** * required .hbase.pb.SnapshotFileInfo.Type type = 1; @@ -789,7 +792,7 @@ public final class SnapshotProtos { throw new NullPointerException(); } bitField0_ |= 0x00000001; - type_ = value; + type_ = value.getNumber(); onChanged(); return this; } @@ -798,12 +801,11 @@ public final class SnapshotProtos { */ public Builder clearType() { bitField0_ = (bitField0_ & ~0x00000001); - type_ = org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type.HFILE; + type_ = 1; onChanged(); return this; } - // optional string hfile = 3; private java.lang.Object hfile_ = ""; /** * optional string hfile = 3; @@ -817,9 +819,12 @@ public final class SnapshotProtos { public java.lang.String getHfile() { java.lang.Object ref = hfile_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - hfile_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + hfile_ = s; + } return s; } else { return (java.lang.String) ref; @@ -877,7 +882,6 @@ public final class SnapshotProtos { return this; } - // optional string wal_server = 4; private java.lang.Object walServer_ = ""; /** * optional string wal_server = 4; @@ -891,9 +895,12 @@ public final class SnapshotProtos { public java.lang.String getWalServer() { java.lang.Object ref = walServer_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - walServer_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + walServer_ = s; + } return s; } else { return (java.lang.String) ref; @@ -951,7 +958,6 @@ public final class SnapshotProtos { return this; } - // optional string wal_name = 5; 
private java.lang.Object walName_ = ""; /** * optional string wal_name = 5; @@ -965,9 +971,12 @@ public final class SnapshotProtos { public java.lang.String getWalName() { java.lang.Object ref = walName_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - walName_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + walName_ = s; + } return s; } else { return (java.lang.String) ref; @@ -1024,22 +1033,59 @@ public final class SnapshotProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotFileInfo) } + // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotFileInfo) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo DEFAULT_INSTANCE; static { - defaultInstance = new SnapshotFileInfo(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public SnapshotFileInfo parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SnapshotFileInfo(input, extensionRegistry); + } + }; + + 
public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotFileInfo) } - public interface SnapshotRegionManifestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface SnapshotRegionManifestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SnapshotRegionManifest) + com.google.protobuf.MessageOrBuilder { - // optional int32 version = 1; /** * optional int32 version = 1; */ @@ -1049,7 +1095,6 @@ public final class SnapshotProtos { */ int getVersion(); - // required .hbase.pb.RegionInfo region_info = 2; /** * required .hbase.pb.RegionInfo region_info = 2; */ @@ -1063,7 +1108,6 @@ public final class SnapshotProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(); - // repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3; /** * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3; */ @@ -1091,36 +1135,29 @@ public final class SnapshotProtos { /** * Protobuf type {@code hbase.pb.SnapshotRegionManifest} */ - public static final class SnapshotRegionManifest extends - com.google.protobuf.GeneratedMessage - implements SnapshotRegionManifestOrBuilder { + public static final class SnapshotRegionManifest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.SnapshotRegionManifest) + SnapshotRegionManifestOrBuilder { // Use SnapshotRegionManifest.newBuilder() to construct. 
- private SnapshotRegionManifest(com.google.protobuf.GeneratedMessage.Builder builder) { + private SnapshotRegionManifest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private SnapshotRegionManifest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SnapshotRegionManifest defaultInstance; - public static SnapshotRegionManifest getDefaultInstance() { - return defaultInstance; - } - - public SnapshotRegionManifest getDefaultInstanceForType() { - return defaultInstance; + private SnapshotRegionManifest() { + version_ = 0; + familyFiles_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private SnapshotRegionManifest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -1162,7 +1199,8 @@ public final class SnapshotProtos { familyFiles_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000004; } - familyFiles_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.PARSER, extensionRegistry)); + familyFiles_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.PARSER, extensionRegistry)); break; } } @@ -1171,7 +1209,7 @@ public final class SnapshotProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - 
e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { familyFiles_ = java.util.Collections.unmodifiableList(familyFiles_); @@ -1185,32 +1223,17 @@ public final class SnapshotProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SnapshotRegionManifest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SnapshotRegionManifest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public interface StoreFileOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface StoreFileOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SnapshotRegionManifest.StoreFile) + com.google.protobuf.MessageOrBuilder { - // required string name = 1; /** * required string name = 1; */ @@ -1225,7 +1248,6 @@ public final class SnapshotProtos { com.google.protobuf.ByteString getNameBytes(); - // optional .hbase.pb.Reference reference = 2; /** * optional .hbase.pb.Reference reference = 2; */ @@ -1239,57 
+1261,49 @@ public final class SnapshotProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.ReferenceOrBuilder getReferenceOrBuilder(); - // optional uint64 file_size = 3; /** - * optional uint64 file_size = 3; - * *
        * TODO: Add checksums or other fields to verify the file
        * 
+ * + * optional uint64 file_size = 3; */ boolean hasFileSize(); /** - * optional uint64 file_size = 3; - * *
        * TODO: Add checksums or other fields to verify the file
        * 
+ * + * optional uint64 file_size = 3; */ long getFileSize(); } /** * Protobuf type {@code hbase.pb.SnapshotRegionManifest.StoreFile} */ - public static final class StoreFile extends - com.google.protobuf.GeneratedMessage - implements StoreFileOrBuilder { + public static final class StoreFile extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.SnapshotRegionManifest.StoreFile) + StoreFileOrBuilder { // Use StoreFile.newBuilder() to construct. - private StoreFile(com.google.protobuf.GeneratedMessage.Builder builder) { + private StoreFile(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private StoreFile(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final StoreFile defaultInstance; - public static StoreFile getDefaultInstance() { - return defaultInstance; } - - public StoreFile getDefaultInstanceForType() { - return defaultInstance; + private StoreFile() { + name_ = ""; + fileSize_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private StoreFile( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -1309,8 +1323,9 @@ public final class SnapshotProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - name_ = input.readBytes(); + name_ = bs; break; } case 18: { @@ -1337,7 +1352,7 @@ public final class SnapshotProtos { throw e.setUnfinishedMessage(this); } 
catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -1348,32 +1363,16 @@ public final class SnapshotProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.class, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public StoreFile parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new StoreFile(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string name = 1; public static final int NAME_FIELD_NUMBER = 1; - private java.lang.Object name_; + private volatile java.lang.Object name_; /** * required string name = 1; */ @@ -1414,7 +1413,6 @@ public final class SnapshotProtos { } } - // optional .hbase.pb.Reference reference = 2; public static final int REFERENCE_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference reference_; /** @@ -1427,48 +1425,43 @@ 
public final class SnapshotProtos { * optional .hbase.pb.Reference reference = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference getReference() { - return reference_; + return reference_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.getDefaultInstance() : reference_; } /** * optional .hbase.pb.Reference reference = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.ReferenceOrBuilder getReferenceOrBuilder() { - return reference_; + return reference_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.getDefaultInstance() : reference_; } - // optional uint64 file_size = 3; public static final int FILE_SIZE_FIELD_NUMBER = 3; private long fileSize_; /** - * optional uint64 file_size = 3; - * *
        * TODO: Add checksums or other fields to verify the file
        * 
+ * + * optional uint64 file_size = 3; */ public boolean hasFileSize() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * optional uint64 file_size = 3; - * *
        * TODO: Add checksums or other fields to verify the file
        * 
+ * + * optional uint64 file_size = 3; */ public long getFileSize() { return fileSize_; } - private void initFields() { - name_ = ""; - reference_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.getDefaultInstance(); - fileSize_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasName()) { memoizedIsInitialized = 0; @@ -1486,50 +1479,41 @@ public final class SnapshotProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, reference_); + output.writeMessage(2, getReference()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeUInt64(3, fileSize_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, reference_); + .computeMessageSize(2, getReference()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(3, fileSize_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += 
unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -1555,12 +1539,10 @@ public final class SnapshotProtos { result = result && (getFileSize() == other.getFileSize()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -1578,9 +1560,10 @@ public final class SnapshotProtos { } if (hasFileSize()) { hash = (37 * hash) + FILE_SIZE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getFileSize()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getFileSize()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -1608,46 +1591,57 @@ public final class SnapshotProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == 
DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -1655,14 +1649,15 @@ public final class SnapshotProtos { * Protobuf type {@code hbase.pb.SnapshotRegionManifest.StoreFile} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFileOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SnapshotRegionManifest.StoreFile) + org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFileOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -1675,25 +1670,22 @@ public final class SnapshotProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + 
.alwaysUseFieldBuilders) { getReferenceFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); name_ = ""; bitField0_ = (bitField0_ & ~0x00000001); if (referenceBuilder_ == null) { - reference_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.getDefaultInstance(); + reference_ = null; } else { referenceBuilder_.clear(); } @@ -1703,10 +1695,6 @@ public final class SnapshotProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_descriptor; @@ -1749,6 +1737,32 @@ public final class SnapshotProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile) { return 
mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile)other); @@ -1771,18 +1785,17 @@ public final class SnapshotProtos { if (other.hasFileSize()) { setFileSize(other.getFileSize()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasName()) { - return false; } if (hasReference()) { if (!getReference().isInitialized()) { - return false; } } @@ -1798,7 +1811,7 @@ public final class SnapshotProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -1808,7 +1821,6 @@ public final class SnapshotProtos { } private int bitField0_; - // required string name = 1; private java.lang.Object name_ = ""; /** * required string name = 1; @@ -1822,9 +1834,12 @@ public final class SnapshotProtos { public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - name_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + name_ = s; + } return s; } else { return (java.lang.String) ref; @@ -1882,9 +1897,8 @@ public final class SnapshotProtos { return this; } - // optional .hbase.pb.Reference reference = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference reference_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private 
org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference reference_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference, org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.ReferenceOrBuilder> referenceBuilder_; /** * optional .hbase.pb.Reference reference = 2; @@ -1897,7 +1911,7 @@ public final class SnapshotProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference getReference() { if (referenceBuilder_ == null) { - return reference_; + return reference_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.getDefaultInstance() : reference_; } else { return referenceBuilder_.getMessage(); } @@ -1938,6 +1952,7 @@ public final class SnapshotProtos { public Builder mergeReference(org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference value) { if (referenceBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + reference_ != null && reference_ != org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.getDefaultInstance()) { reference_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.newBuilder(reference_).mergeFrom(value).buildPartial(); @@ -1956,7 +1971,7 @@ public final class SnapshotProtos { */ public Builder clearReference() { if (referenceBuilder_ == null) { - reference_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.getDefaultInstance(); + reference_ = null; onChanged(); } else { referenceBuilder_.clear(); @@ -1979,19 +1994,20 @@ public final class SnapshotProtos { if (referenceBuilder_ != null) { return referenceBuilder_.getMessageOrBuilder(); } else { - return reference_; + return reference_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.getDefaultInstance() : reference_; } } /** * optional .hbase.pb.Reference reference = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference, org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.ReferenceOrBuilder> getReferenceFieldBuilder() { if (referenceBuilder_ == null) { - referenceBuilder_ = new com.google.protobuf.SingleFieldBuilder< + referenceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference, org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.ReferenceOrBuilder>( - reference_, + getReference(), getParentForChildren(), isClean()); reference_ = null; @@ -1999,34 +2015,33 @@ public final class SnapshotProtos { return referenceBuilder_; } - // optional uint64 file_size = 3; private long fileSize_ ; /** - * optional uint64 file_size = 3; - * *
          * TODO: Add checksums or other fields to verify the file
          * 
+ * + * optional uint64 file_size = 3; */ public boolean hasFileSize() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * optional uint64 file_size = 3; - * *
          * TODO: Add checksums or other fields to verify the file
          * 
+ * + * optional uint64 file_size = 3; */ public long getFileSize() { return fileSize_; } /** - * optional uint64 file_size = 3; - * *
          * TODO: Add checksums or other fields to verify the file
          * 
+ * + * optional uint64 file_size = 3; */ public Builder setFileSize(long value) { bitField0_ |= 0x00000004; @@ -2035,11 +2050,11 @@ public final class SnapshotProtos { return this; } /** - * optional uint64 file_size = 3; - * *
          * TODO: Add checksums or other fields to verify the file
          * 
+ * + * optional uint64 file_size = 3; */ public Builder clearFileSize() { bitField0_ = (bitField0_ & ~0x00000004); @@ -2047,22 +2062,59 @@ public final class SnapshotProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotRegionManifest.StoreFile) } + // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotRegionManifest.StoreFile) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile DEFAULT_INSTANCE; static { - defaultInstance = new StoreFile(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public StoreFile parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new StoreFile(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // 
@@protoc_insertion_point(class_scope:hbase.pb.SnapshotRegionManifest.StoreFile) } - public interface FamilyFilesOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface FamilyFilesOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SnapshotRegionManifest.FamilyFiles) + com.google.protobuf.MessageOrBuilder { - // required bytes family_name = 1; /** * required bytes family_name = 1; */ @@ -2072,7 +2124,6 @@ public final class SnapshotProtos { */ com.google.protobuf.ByteString getFamilyName(); - // repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2; /** * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2; */ @@ -2100,36 +2151,29 @@ public final class SnapshotProtos { /** * Protobuf type {@code hbase.pb.SnapshotRegionManifest.FamilyFiles} */ - public static final class FamilyFiles extends - com.google.protobuf.GeneratedMessage - implements FamilyFilesOrBuilder { + public static final class FamilyFiles extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.SnapshotRegionManifest.FamilyFiles) + FamilyFilesOrBuilder { // Use FamilyFiles.newBuilder() to construct. 
- private FamilyFiles(com.google.protobuf.GeneratedMessage.Builder builder) { + private FamilyFiles(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private FamilyFiles(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final FamilyFiles defaultInstance; - public static FamilyFiles getDefaultInstance() { - return defaultInstance; - } - - public FamilyFiles getDefaultInstanceForType() { - return defaultInstance; + private FamilyFiles() { + familyName_ = com.google.protobuf.ByteString.EMPTY; + storeFiles_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private FamilyFiles( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -2158,7 +2202,8 @@ public final class SnapshotProtos { storeFiles_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } - storeFiles_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.PARSER, extensionRegistry)); + storeFiles_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.PARSER, extensionRegistry)); break; } } @@ -2167,7 +2212,7 @@ public final class SnapshotProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); 
} finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { storeFiles_ = java.util.Collections.unmodifiableList(storeFiles_); @@ -2181,30 +2226,14 @@ public final class SnapshotProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.class, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public FamilyFiles parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new FamilyFiles(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bytes family_name = 1; public static final int FAMILY_NAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString familyName_; /** @@ -2220,7 +2249,6 @@ public final class SnapshotProtos { return familyName_; } - // repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2; public static final int STORE_FILES_FIELD_NUMBER = 2; private java.util.List storeFiles_; /** @@ -2256,14 +2284,11 @@ public final class SnapshotProtos { return storeFiles_.get(index); } - private void initFields() { - familyName_ = com.google.protobuf.ByteString.EMPTY; - 
storeFiles_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasFamilyName()) { memoizedIsInitialized = 0; @@ -2281,19 +2306,17 @@ public final class SnapshotProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, familyName_); } for (int i = 0; i < storeFiles_.size(); i++) { output.writeMessage(2, storeFiles_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -2305,19 +2328,13 @@ public final class SnapshotProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, storeFiles_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -2335,12 +2352,10 @@ public final class SnapshotProtos { } result = result && getStoreFilesList() .equals(other.getStoreFilesList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -2356,7 
+2371,7 @@ public final class SnapshotProtos { hash = (37 * hash) + STORE_FILES_FIELD_NUMBER; hash = (53 * hash) + getStoreFilesList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -2384,46 +2399,57 @@ public final class SnapshotProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles parseFrom( com.google.protobuf.CodedInputStream input) throws 
java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -2431,14 +2457,15 @@ public final class SnapshotProtos { * Protobuf type {@code hbase.pb.SnapshotRegionManifest.FamilyFiles} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFilesOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SnapshotRegionManifest.FamilyFiles) + org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFilesOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -2451,19 +2478,16 @@ public final class SnapshotProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { 
getStoreFilesFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); familyName_ = com.google.protobuf.ByteString.EMPTY; @@ -2477,10 +2501,6 @@ public final class SnapshotProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_descriptor; @@ -2520,6 +2540,32 @@ public final class SnapshotProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles)other); @@ -2553,25 +2599,24 @@ public final class SnapshotProtos { storeFiles_ = other.storeFiles_; bitField0_ = (bitField0_ & ~0x00000002); storeFilesBuilder_ = - 
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getStoreFilesFieldBuilder() : null; } else { storeFilesBuilder_.addAllMessages(other.storeFiles_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasFamilyName()) { - return false; } for (int i = 0; i < getStoreFilesCount(); i++) { if (!getStoreFiles(i).isInitialized()) { - return false; } } @@ -2587,7 +2632,7 @@ public final class SnapshotProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -2597,7 +2642,6 @@ public final class SnapshotProtos { } private int bitField0_; - // required bytes family_name = 1; private com.google.protobuf.ByteString familyName_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes family_name = 1; @@ -2633,7 +2677,6 @@ public final class SnapshotProtos { return this; } - // repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2; private java.util.List storeFiles_ = java.util.Collections.emptyList(); private void ensureStoreFilesIsMutable() { @@ -2643,7 +2686,7 @@ public final class SnapshotProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFileOrBuilder> 
storeFilesBuilder_; /** @@ -2775,7 +2818,8 @@ public final class SnapshotProtos { java.lang.Iterable values) { if (storeFilesBuilder_ == null) { ensureStoreFilesIsMutable(); - super.addAll(values, storeFiles_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, storeFiles_); onChanged(); } else { storeFilesBuilder_.addAllMessages(values); @@ -2858,11 +2902,11 @@ public final class SnapshotProtos { getStoreFilesBuilderList() { return getStoreFilesFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFileOrBuilder> getStoreFilesFieldBuilder() { if (storeFilesBuilder_ == null) { - storeFilesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + storeFilesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFileOrBuilder>( storeFiles_, ((bitField0_ & 0x00000002) == 0x00000002), @@ -2872,20 +2916,56 @@ public final class SnapshotProtos { } return storeFilesBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotRegionManifest.FamilyFiles) } + // 
@@protoc_insertion_point(class_scope:hbase.pb.SnapshotRegionManifest.FamilyFiles) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles DEFAULT_INSTANCE; static { - defaultInstance = new FamilyFiles(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public FamilyFiles parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FamilyFiles(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotRegionManifest.FamilyFiles) } private int bitField0_; - // optional int32 version = 1; public static final int VERSION_FIELD_NUMBER = 1; private int version_; /** @@ -2901,7 +2981,6 @@ public final class SnapshotProtos { return version_; } - // required .hbase.pb.RegionInfo region_info = 2; public static final int REGION_INFO_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo regionInfo_; /** @@ -2914,16 +2993,15 @@ public final class SnapshotProtos { * required .hbase.pb.RegionInfo region_info = 2; */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { - return regionInfo_; + return regionInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : regionInfo_; } /** * required .hbase.pb.RegionInfo region_info = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() { - return regionInfo_; + return regionInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : regionInfo_; } - // repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3; public static final int FAMILY_FILES_FIELD_NUMBER = 3; private java.util.List familyFiles_; /** @@ -2959,15 +3037,11 @@ public final class SnapshotProtos { return familyFiles_.get(index); } - private void initFields() { - version_ = 0; - regionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - familyFiles_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRegionInfo()) { memoizedIsInitialized = 0; @@ -2989,22 +3063,20 @@ public final class SnapshotProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeInt32(1, version_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, regionInfo_); + output.writeMessage(2, getRegionInfo()); } for (int i = 0; i < familyFiles_.size(); i++) { output.writeMessage(3, familyFiles_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int 
getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -3014,25 +3086,19 @@ public final class SnapshotProtos { } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, regionInfo_); + .computeMessageSize(2, getRegionInfo()); } for (int i = 0; i < familyFiles_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(3, familyFiles_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -3055,12 +3121,10 @@ public final class SnapshotProtos { } result = result && getFamilyFilesList() .equals(other.getFamilyFilesList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -3080,7 +3144,7 @@ public final class SnapshotProtos { hash = (37 * hash) + FAMILY_FILES_FIELD_NUMBER; hash = (53 * hash) + getFamilyFilesList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -3108,46 +3172,57 @@ public final class SnapshotProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return 
newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -3155,14 +3230,15 @@ public final class SnapshotProtos { * Protobuf type {@code hbase.pb.SnapshotRegionManifest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SnapshotRegionManifest) + org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -3175,26 +3251,23 @@ public final class SnapshotProtos { } private Builder( - 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionInfoFieldBuilder(); getFamilyFilesFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); version_ = 0; bitField0_ = (bitField0_ & ~0x00000001); if (regionInfoBuilder_ == null) { - regionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + regionInfo_ = null; } else { regionInfoBuilder_.clear(); } @@ -3208,10 +3281,6 @@ public final class SnapshotProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_descriptor; @@ -3259,6 +3328,32 @@ public final class SnapshotProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + 
com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest)other); @@ -3295,29 +3390,27 @@ public final class SnapshotProtos { familyFiles_ = other.familyFiles_; bitField0_ = (bitField0_ & ~0x00000004); familyFilesBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getFamilyFilesFieldBuilder() : null; } else { familyFilesBuilder_.addAllMessages(other.familyFiles_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasRegionInfo()) { - return false; } if (!getRegionInfo().isInitialized()) { - return false; } for (int i = 0; i < getFamilyFilesCount(); i++) { if (!getFamilyFiles(i).isInitialized()) { - return false; } } @@ -3333,7 +3426,7 @@ public final class SnapshotProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -3343,7 +3436,6 @@ public final class SnapshotProtos { } private int bitField0_; - // optional int32 version = 1; private int version_ ; /** * optional int32 version = 1; @@ -3376,9 +3468,8 @@ public final class SnapshotProtos { return this; } - // required .hbase.pb.RegionInfo region_info = 2; - private 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_; /** * required .hbase.pb.RegionInfo region_info = 2; @@ -3391,7 +3482,7 @@ public final class SnapshotProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { if (regionInfoBuilder_ == null) { - return regionInfo_; + return regionInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : regionInfo_; } else { return regionInfoBuilder_.getMessage(); } @@ -3432,6 +3523,7 @@ public final class SnapshotProtos { public Builder mergeRegionInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo value) { if (regionInfoBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + regionInfo_ != null && regionInfo_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) { regionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(regionInfo_).mergeFrom(value).buildPartial(); @@ -3450,7 +3542,7 @@ public final class SnapshotProtos { */ public Builder clearRegionInfo() { if (regionInfoBuilder_ == null) { - regionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + regionInfo_ = null; onChanged(); } else { regionInfoBuilder_.clear(); @@ -3473,19 +3565,20 @@ public final class 
SnapshotProtos { if (regionInfoBuilder_ != null) { return regionInfoBuilder_.getMessageOrBuilder(); } else { - return regionInfo_; + return regionInfo_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : regionInfo_; } } /** * required .hbase.pb.RegionInfo region_info = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> getRegionInfoFieldBuilder() { if (regionInfoBuilder_ == null) { - regionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( - regionInfo_, + getRegionInfo(), getParentForChildren(), isClean()); regionInfo_ = null; @@ -3493,7 +3586,6 @@ public final class SnapshotProtos { return regionInfoBuilder_; } - // repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3; private java.util.List familyFiles_ = java.util.Collections.emptyList(); private void ensureFamilyFilesIsMutable() { @@ -3503,7 +3595,7 @@ public final class SnapshotProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFilesOrBuilder> familyFilesBuilder_; /** 
@@ -3635,7 +3727,8 @@ public final class SnapshotProtos { java.lang.Iterable values) { if (familyFilesBuilder_ == null) { ensureFamilyFilesIsMutable(); - super.addAll(values, familyFiles_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, familyFiles_); onChanged(); } else { familyFilesBuilder_.addAllMessages(values); @@ -3718,11 +3811,11 @@ public final class SnapshotProtos { getFamilyFilesBuilderList() { return getFamilyFilesFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFilesOrBuilder> getFamilyFilesFieldBuilder() { if (familyFilesBuilder_ == null) { - familyFilesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + familyFilesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFilesOrBuilder>( familyFiles_, ((bitField0_ & 0x00000004) == 0x00000004), @@ -3732,22 +3825,59 @@ public final class SnapshotProtos { } return familyFilesBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotRegionManifest) } + // 
@@protoc_insertion_point(class_scope:hbase.pb.SnapshotRegionManifest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest DEFAULT_INSTANCE; static { - defaultInstance = new SnapshotRegionManifest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public SnapshotRegionManifest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SnapshotRegionManifest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotRegionManifest) } - public interface SnapshotDataManifestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface SnapshotDataManifestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SnapshotDataManifest) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.TableSchema table_schema = 1; /** * required .hbase.pb.TableSchema table_schema = 1; */ @@ -3761,7 +3891,6 @@ public final class SnapshotProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder(); - // repeated 
.hbase.pb.SnapshotRegionManifest region_manifests = 2; /** * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2; */ @@ -3789,36 +3918,28 @@ public final class SnapshotProtos { /** * Protobuf type {@code hbase.pb.SnapshotDataManifest} */ - public static final class SnapshotDataManifest extends - com.google.protobuf.GeneratedMessage - implements SnapshotDataManifestOrBuilder { + public static final class SnapshotDataManifest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.SnapshotDataManifest) + SnapshotDataManifestOrBuilder { // Use SnapshotDataManifest.newBuilder() to construct. - private SnapshotDataManifest(com.google.protobuf.GeneratedMessage.Builder builder) { + private SnapshotDataManifest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private SnapshotDataManifest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SnapshotDataManifest defaultInstance; - public static SnapshotDataManifest getDefaultInstance() { - return defaultInstance; - } - - public SnapshotDataManifest getDefaultInstanceForType() { - return defaultInstance; + private SnapshotDataManifest() { + regionManifests_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private SnapshotDataManifest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -3855,7 +3976,8 @@ public final class 
SnapshotProtos { regionManifests_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } - regionManifests_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.PARSER, extensionRegistry)); + regionManifests_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.PARSER, extensionRegistry)); break; } } @@ -3864,7 +3986,7 @@ public final class SnapshotProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { regionManifests_ = java.util.Collections.unmodifiableList(regionManifests_); @@ -3878,30 +4000,14 @@ public final class SnapshotProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDataManifest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDataManifest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDataManifest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDataManifest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SnapshotDataManifest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SnapshotDataManifest(input, extensionRegistry); - } - }; - - @java.lang.Override - public 
com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.TableSchema table_schema = 1; public static final int TABLE_SCHEMA_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema tableSchema_; /** @@ -3914,16 +4020,15 @@ public final class SnapshotProtos { * required .hbase.pb.TableSchema table_schema = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { - return tableSchema_; + return tableSchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : tableSchema_; } /** * required .hbase.pb.TableSchema table_schema = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() { - return tableSchema_; + return tableSchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : tableSchema_; } - // repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2; public static final int REGION_MANIFESTS_FIELD_NUMBER = 2; private java.util.List regionManifests_; /** @@ -3959,14 +4064,11 @@ public final class SnapshotProtos { return regionManifests_.get(index); } - private void initFields() { - tableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - regionManifests_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasTableSchema()) { memoizedIsInitialized = 0; @@ -3988,43 +4090,35 @@ public final class SnapshotProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if 
(((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, tableSchema_); + output.writeMessage(1, getTableSchema()); } for (int i = 0; i < regionManifests_.size(); i++) { output.writeMessage(2, regionManifests_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, tableSchema_); + .computeMessageSize(1, getTableSchema()); } for (int i = 0; i < regionManifests_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, regionManifests_.get(i)); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -4042,12 +4136,10 @@ public final class SnapshotProtos { } result = result && getRegionManifestsList() .equals(other.getRegionManifestsList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -4063,7 +4155,7 @@ public final class SnapshotProtos { hash = (37 * hash) + REGION_MANIFESTS_FIELD_NUMBER; hash = (53 * hash) + getRegionManifestsList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return 
hash; } @@ -4091,46 +4183,57 @@ public final class SnapshotProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDataManifest parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDataManifest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDataManifest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDataManifest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDataManifest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDataManifest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDataManifest prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -4138,14 +4241,15 @@ public final class SnapshotProtos { * Protobuf type {@code hbase.pb.SnapshotDataManifest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDataManifestOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SnapshotDataManifest) + org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDataManifestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDataManifest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDataManifest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -4158,24 +4262,21 @@ public final class SnapshotProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTableSchemaFieldBuilder(); getRegionManifestsFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (tableSchemaBuilder_ == null) { - tableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + tableSchema_ = null; } else { tableSchemaBuilder_.clear(); } @@ -4189,10 +4290,6 @@ public final class SnapshotProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDataManifest_descriptor; @@ -4236,6 +4333,32 @@ public final class SnapshotProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder 
setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDataManifest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDataManifest)other); @@ -4269,29 +4392,27 @@ public final class SnapshotProtos { regionManifests_ = other.regionManifests_; bitField0_ = (bitField0_ & ~0x00000002); regionManifestsBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getRegionManifestsFieldBuilder() : null; } else { regionManifestsBuilder_.addAllMessages(other.regionManifests_); } } } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasTableSchema()) { - return false; } if (!getTableSchema().isInitialized()) { - return false; } for (int i = 0; i < getRegionManifestsCount(); i++) { if (!getRegionManifests(i).isInitialized()) { - return false; } } @@ -4307,7 +4428,7 @@ public final class SnapshotProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDataManifest) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -4317,9 +4438,8 @@ public final class SnapshotProtos { } private int bitField0_; - // required .hbase.pb.TableSchema 
table_schema = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema tableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema tableSchema_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_; /** * required .hbase.pb.TableSchema table_schema = 1; @@ -4332,7 +4452,7 @@ public final class SnapshotProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { if (tableSchemaBuilder_ == null) { - return tableSchema_; + return tableSchema_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : tableSchema_; } else { return tableSchemaBuilder_.getMessage(); } @@ -4373,6 +4493,7 @@ public final class SnapshotProtos { public Builder mergeTableSchema(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema value) { if (tableSchemaBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + tableSchema_ != null && tableSchema_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) { tableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.newBuilder(tableSchema_).mergeFrom(value).buildPartial(); @@ -4391,7 +4512,7 @@ public final class SnapshotProtos { */ public Builder clearTableSchema() { if (tableSchemaBuilder_ == null) { - tableSchema_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + tableSchema_ = null; onChanged(); } else { 
tableSchemaBuilder_.clear(); @@ -4414,19 +4535,20 @@ public final class SnapshotProtos { if (tableSchemaBuilder_ != null) { return tableSchemaBuilder_.getMessageOrBuilder(); } else { - return tableSchema_; + return tableSchema_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance() : tableSchema_; } } /** * required .hbase.pb.TableSchema table_schema = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> getTableSchemaFieldBuilder() { if (tableSchemaBuilder_ == null) { - tableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>( - tableSchema_, + getTableSchema(), getParentForChildren(), isClean()); tableSchema_ = null; @@ -4434,7 +4556,6 @@ public final class SnapshotProtos { return tableSchemaBuilder_; } - // repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2; private java.util.List regionManifests_ = java.util.Collections.emptyList(); private void ensureRegionManifestsIsMutable() { @@ -4444,7 +4565,7 @@ public final class SnapshotProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifestOrBuilder> regionManifestsBuilder_; /** @@ -4576,7 +4697,8 @@ public final class SnapshotProtos { java.lang.Iterable values) { if (regionManifestsBuilder_ == null) { ensureRegionManifestsIsMutable(); - super.addAll(values, regionManifests_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, regionManifests_); onChanged(); } else { regionManifestsBuilder_.addAllMessages(values); @@ -4659,11 +4781,11 @@ public final class SnapshotProtos { getRegionManifestsBuilderList() { return getRegionManifestsFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifestOrBuilder> getRegionManifestsFieldBuilder() { if (regionManifestsBuilder_ == null) { - regionManifestsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + regionManifestsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifestOrBuilder>( regionManifests_, ((bitField0_ & 0x00000002) == 0x00000002), @@ -4673,49 +4795,86 @@ public final class SnapshotProtos { } return regionManifestsBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return 
super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotDataManifest) } + // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotDataManifest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDataManifest DEFAULT_INSTANCE; static { - defaultInstance = new SnapshotDataManifest(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDataManifest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDataManifest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public SnapshotDataManifest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SnapshotDataManifest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDataManifest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotDataManifest) } - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SnapshotFileInfo_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SnapshotFileInfo_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final 
com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SnapshotRegionManifest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SnapshotRegionManifest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SnapshotDataManifest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SnapshotDataManifest_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { @@ -4740,49 +4899,51 @@ public final class SnapshotProtos { 
"edB\016SnapshotProtosH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_hbase_pb_SnapshotFileInfo_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_hbase_pb_SnapshotFileInfo_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_SnapshotFileInfo_descriptor, - new java.lang.String[] { "Type", "Hfile", "WalServer", "WalName", }); - internal_static_hbase_pb_SnapshotRegionManifest_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_hbase_pb_SnapshotRegionManifest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_SnapshotRegionManifest_descriptor, - new java.lang.String[] { "Version", "RegionInfo", "FamilyFiles", }); - internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_descriptor = - internal_static_hbase_pb_SnapshotRegionManifest_descriptor.getNestedTypes().get(0); - internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_descriptor, - new java.lang.String[] { "Name", "Reference", "FileSize", }); - internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_descriptor = - internal_static_hbase_pb_SnapshotRegionManifest_descriptor.getNestedTypes().get(1); - internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_descriptor, - new java.lang.String[] { "FamilyName", "StoreFiles", }); - 
internal_static_hbase_pb_SnapshotDataManifest_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_hbase_pb_SnapshotDataManifest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_SnapshotDataManifest_descriptor, - new java.lang.String[] { "TableSchema", "RegionManifests", }); - return null; - } - }; + new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.getDescriptor(), org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(), }, assigner); + internal_static_hbase_pb_SnapshotFileInfo_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_hbase_pb_SnapshotFileInfo_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_SnapshotFileInfo_descriptor, + new java.lang.String[] { "Type", "Hfile", "WalServer", "WalName", }); + internal_static_hbase_pb_SnapshotRegionManifest_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_hbase_pb_SnapshotRegionManifest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_SnapshotRegionManifest_descriptor, + new java.lang.String[] { "Version", "RegionInfo", "FamilyFiles", }); + internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_descriptor = + internal_static_hbase_pb_SnapshotRegionManifest_descriptor.getNestedTypes().get(0); + internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + 
internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_descriptor, + new java.lang.String[] { "Name", "Reference", "FileSize", }); + internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_descriptor = + internal_static_hbase_pb_SnapshotRegionManifest_descriptor.getNestedTypes().get(1); + internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_descriptor, + new java.lang.String[] { "FamilyName", "StoreFiles", }); + internal_static_hbase_pb_SnapshotDataManifest_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_hbase_pb_SnapshotDataManifest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_SnapshotDataManifest_descriptor, + new java.lang.String[] { "TableSchema", "RegionManifests", }); + org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.getDescriptor(); + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/TracingProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/TracingProtos.java index 893fc62..940a498 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/TracingProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/TracingProtos.java @@ -6,12 +6,18 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated; public final class TracingProtos { private TracingProtos() {} public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + 
(com.google.protobuf.ExtensionRegistryLite) registry); } - public interface RPCTInfoOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RPCTInfoOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RPCTInfo) + com.google.protobuf.MessageOrBuilder { - // optional int64 trace_id = 1; /** * optional int64 trace_id = 1; */ @@ -21,7 +27,6 @@ public final class TracingProtos { */ long getTraceId(); - // optional int64 parent_id = 2; /** * optional int64 parent_id = 2; */ @@ -32,8 +37,6 @@ public final class TracingProtos { long getParentId(); } /** - * Protobuf type {@code hbase.pb.RPCTInfo} - * *
    *Used to pass through the information necessary to continue
    *a trace after an RPC is made. All we need is the traceid 
@@ -41,37 +44,32 @@ public final class TracingProtos {
    *the id of the current span when this message was sent, so we know 
    *what span caused the new span we will create when this message is received.
    * 
+ * + * Protobuf type {@code hbase.pb.RPCTInfo} */ - public static final class RPCTInfo extends - com.google.protobuf.GeneratedMessage - implements RPCTInfoOrBuilder { + public static final class RPCTInfo extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RPCTInfo) + RPCTInfoOrBuilder { // Use RPCTInfo.newBuilder() to construct. - private RPCTInfo(com.google.protobuf.GeneratedMessage.Builder builder) { + private RPCTInfo(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private RPCTInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RPCTInfo defaultInstance; - public static RPCTInfo getDefaultInstance() { - return defaultInstance; - } - - public RPCTInfo getDefaultInstanceForType() { - return defaultInstance; + private RPCTInfo() { + traceId_ = 0L; + parentId_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RPCTInfo( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -106,7 +104,7 @@ public final class TracingProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -117,30 +115,14 @@ public final class TracingProtos { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.internal_static_hbase_pb_RPCTInfo_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.internal_static_hbase_pb_RPCTInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo.class, org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RPCTInfo parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RPCTInfo(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional int64 trace_id = 1; public static final int TRACE_ID_FIELD_NUMBER = 1; private long traceId_; /** @@ -156,7 +138,6 @@ public final class TracingProtos { return traceId_; } - // optional int64 parent_id = 2; public static final int PARENT_ID_FIELD_NUMBER = 2; private long parentId_; /** @@ -172,14 +153,11 @@ public final class TracingProtos { return parentId_; } - private void initFields() { - traceId_ = 0L; - parentId_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -187,19 +165,17 @@ public final class TracingProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { 
- getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeInt64(1, traceId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeInt64(2, parentId_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -211,19 +187,13 @@ public final class TracingProtos { size += com.google.protobuf.CodedOutputStream .computeInt64Size(2, parentId_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -244,12 +214,10 @@ public final class TracingProtos { result = result && (getParentId() == other.getParentId()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -259,13 +227,15 @@ public final class TracingProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTraceId()) { hash = (37 * hash) + TRACE_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getTraceId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getTraceId()); } if (hasParentId()) { hash = (37 * hash) + PARENT_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getParentId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getParentId()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * 
hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -293,52 +263,61 @@ public final class TracingProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.RPCTInfo} - * *
      *Used to pass through the information necessary to continue
      *a trace after an RPC is made. All we need is the traceid 
@@ -346,16 +325,19 @@ public final class TracingProtos {
      *the id of the current span when this message was sent, so we know 
      *what span caused the new span we will create when this message is received.
      * 
+ * + * Protobuf type {@code hbase.pb.RPCTInfo} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfoOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RPCTInfo) + org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.internal_static_hbase_pb_RPCTInfo_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.internal_static_hbase_pb_RPCTInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -368,18 +350,15 @@ public final class TracingProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); traceId_ = 0L; @@ -389,10 +368,6 @@ public final class TracingProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.internal_static_hbase_pb_RPCTInfo_descriptor; @@ -427,6 +402,32 @@ public final class TracingProtos { return result; } + public Builder clone() { + 
return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo)other); @@ -444,7 +445,8 @@ public final class TracingProtos { if (other.hasParentId()) { setParentId(other.getParentId()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -461,7 +463,7 @@ public final class TracingProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -471,7 +473,6 @@ public final class TracingProtos { } private int bitField0_; - // optional int64 trace_id = 1; private long traceId_ ; /** * optional int64 trace_id = 1; @@ -504,7 +505,6 @@ public final class TracingProtos { return this; } - // 
optional int64 parent_id = 2; private long parentId_ ; /** * optional int64 parent_id = 2; @@ -536,29 +536,66 @@ public final class TracingProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RPCTInfo) } + // @@protoc_insertion_point(class_scope:hbase.pb.RPCTInfo) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo DEFAULT_INSTANCE; static { - defaultInstance = new RPCTInfo(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RPCTInfo parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RPCTInfo(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.RPCTInfo) } - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor 
internal_static_hbase_pb_RPCTInfo_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RPCTInfo_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { @@ -568,23 +605,23 @@ public final class TracingProtos { "neratedB\rTracingProtosH\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_hbase_pb_RPCTInfo_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_hbase_pb_RPCTInfo_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RPCTInfo_descriptor, - new java.lang.String[] { "TraceId", "ParentId", }); - return null; - } - }; + new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); + internal_static_hbase_pb_RPCTInfo_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_hbase_pb_RPCTInfo_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_RPCTInfo_descriptor, + new java.lang.String[] { "TraceId", "ParentId", }); } // @@protoc_insertion_point(outer_class_scope) diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/WALProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/WALProtos.java index 9513ccb..e032be7 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/WALProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/WALProtos.java @@ -6,7 +6,13 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated; public final class WALProtos { private WALProtos() {} public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); } /** * Protobuf enum {@code hbase.pb.ScopeType} @@ -16,15 +22,15 @@ public final class WALProtos { /** * REPLICATION_SCOPE_LOCAL = 0; */ - REPLICATION_SCOPE_LOCAL(0, 0), + REPLICATION_SCOPE_LOCAL(0), /** * REPLICATION_SCOPE_GLOBAL = 1; */ - REPLICATION_SCOPE_GLOBAL(1, 1), + REPLICATION_SCOPE_GLOBAL(1), /** * REPLICATION_SCOPE_SERIAL = 2; */ - REPLICATION_SCOPE_SERIAL(2, 2), + 
REPLICATION_SCOPE_SERIAL(2), ; /** @@ -41,9 +47,19 @@ public final class WALProtos { public static final int REPLICATION_SCOPE_SERIAL_VALUE = 2; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static ScopeType valueOf(int value) { + return forNumber(value); + } + + public static ScopeType forNumber(int value) { switch (value) { case 0: return REPLICATION_SCOPE_LOCAL; case 1: return REPLICATION_SCOPE_GLOBAL; @@ -56,17 +72,17 @@ public final class WALProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + ScopeType> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public ScopeType findValueByNumber(int number) { - return ScopeType.valueOf(number); + return ScopeType.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -88,21 +104,19 @@ public final class WALProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private ScopeType(int index, int value) { - this.index = index; + private ScopeType(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.ScopeType) } - public interface WALHeaderOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface WALHeaderOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.WALHeader) + com.google.protobuf.MessageOrBuilder { - // optional bool has_compression = 1; /** * optional bool has_compression = 1; */ @@ -112,7 +126,6 @@ public final class WALProtos { */ boolean 
getHasCompression(); - // optional bytes encryption_key = 2; /** * optional bytes encryption_key = 2; */ @@ -122,7 +135,6 @@ public final class WALProtos { */ com.google.protobuf.ByteString getEncryptionKey(); - // optional bool has_tag_compression = 3; /** * optional bool has_tag_compression = 3; */ @@ -132,7 +144,6 @@ public final class WALProtos { */ boolean getHasTagCompression(); - // optional string writer_cls_name = 4; /** * optional string writer_cls_name = 4; */ @@ -147,7 +158,6 @@ public final class WALProtos { com.google.protobuf.ByteString getWriterClsNameBytes(); - // optional string cell_codec_cls_name = 5; /** * optional string cell_codec_cls_name = 5; */ @@ -165,36 +175,32 @@ public final class WALProtos { /** * Protobuf type {@code hbase.pb.WALHeader} */ - public static final class WALHeader extends - com.google.protobuf.GeneratedMessage - implements WALHeaderOrBuilder { + public static final class WALHeader extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.WALHeader) + WALHeaderOrBuilder { // Use WALHeader.newBuilder() to construct. 
- private WALHeader(com.google.protobuf.GeneratedMessage.Builder builder) { + private WALHeader(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private WALHeader(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final WALHeader defaultInstance; - public static WALHeader getDefaultInstance() { - return defaultInstance; - } - - public WALHeader getDefaultInstanceForType() { - return defaultInstance; + private WALHeader() { + hasCompression_ = false; + encryptionKey_ = com.google.protobuf.ByteString.EMPTY; + hasTagCompression_ = false; + writerClsName_ = ""; + cellCodecClsName_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private WALHeader( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -229,13 +235,15 @@ public final class WALProtos { break; } case 34: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000008; - writerClsName_ = input.readBytes(); + writerClsName_ = bs; break; } case 42: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000010; - cellCodecClsName_ = input.readBytes(); + cellCodecClsName_ = bs; break; } } @@ -244,7 +252,7 @@ public final class WALProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = 
unknownFields.build(); makeExtensionsImmutable(); @@ -255,30 +263,14 @@ public final class WALProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public WALHeader parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new WALHeader(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional bool has_compression = 1; public static final int HAS_COMPRESSION_FIELD_NUMBER = 1; private boolean hasCompression_; /** @@ -294,7 +286,6 @@ public final class WALProtos { return hasCompression_; } - // optional bytes encryption_key = 2; public static final int ENCRYPTION_KEY_FIELD_NUMBER = 2; private com.google.protobuf.ByteString encryptionKey_; /** @@ -310,7 +301,6 @@ public final class WALProtos { return encryptionKey_; } - // optional bool has_tag_compression = 3; public static final int HAS_TAG_COMPRESSION_FIELD_NUMBER = 3; private boolean hasTagCompression_; /** @@ -326,9 +316,8 @@ public final class WALProtos { return hasTagCompression_; } - // optional string writer_cls_name = 4; public static final int WRITER_CLS_NAME_FIELD_NUMBER = 4; - private java.lang.Object 
writerClsName_; + private volatile java.lang.Object writerClsName_; /** * optional string writer_cls_name = 4; */ @@ -369,9 +358,8 @@ public final class WALProtos { } } - // optional string cell_codec_cls_name = 5; public static final int CELL_CODEC_CLS_NAME_FIELD_NUMBER = 5; - private java.lang.Object cellCodecClsName_; + private volatile java.lang.Object cellCodecClsName_; /** * optional string cell_codec_cls_name = 5; */ @@ -412,17 +400,11 @@ public final class WALProtos { } } - private void initFields() { - hasCompression_ = false; - encryptionKey_ = com.google.protobuf.ByteString.EMPTY; - hasTagCompression_ = false; - writerClsName_ = ""; - cellCodecClsName_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -430,7 +412,6 @@ public final class WALProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, hasCompression_); } @@ -441,17 +422,16 @@ public final class WALProtos { output.writeBool(3, hasTagCompression_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeBytes(4, getWriterClsNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, writerClsName_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeBytes(5, getCellCodecClsNameBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, cellCodecClsName_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -468,26 +448,18 @@ public final class 
WALProtos { .computeBoolSize(3, hasTagCompression_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(4, getWriterClsNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, writerClsName_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(5, getCellCodecClsNameBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, cellCodecClsName_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -523,12 +495,10 @@ public final class WALProtos { result = result && getCellCodecClsName() .equals(other.getCellCodecClsName()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -538,7 +508,8 @@ public final class WALProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasHasCompression()) { hash = (37 * hash) + HAS_COMPRESSION_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getHasCompression()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getHasCompression()); } if (hasEncryptionKey()) { hash = (37 * hash) + ENCRYPTION_KEY_FIELD_NUMBER; @@ -546,7 +517,8 @@ public final class WALProtos { } if (hasHasTagCompression()) { hash = (37 * hash) + HAS_TAG_COMPRESSION_FIELD_NUMBER; - hash = (53 * hash) + 
hashBoolean(getHasTagCompression()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getHasTagCompression()); } if (hasWriterClsName()) { hash = (37 * hash) + WRITER_CLS_NAME_FIELD_NUMBER; @@ -556,7 +528,7 @@ public final class WALProtos { hash = (37 * hash) + CELL_CODEC_CLS_NAME_FIELD_NUMBER; hash = (53 * hash) + getCellCodecClsName().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -584,46 +556,57 @@ public final class WALProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -631,14 +614,15 @@ public final class WALProtos { * Protobuf type {@code hbase.pb.WALHeader} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeaderOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.WALHeader) + org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeaderOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -651,18 +635,15 @@ public final class WALProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); hasCompression_ = false; @@ -678,10 +659,6 @@ public final class WALProtos { 
return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_descriptor; @@ -728,6 +705,32 @@ public final class WALProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader)other); @@ -758,7 +761,8 @@ public final class WALProtos { cellCodecClsName_ = other.cellCodecClsName_; onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -775,7 +779,7 @@ public final class WALProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader) e.getUnfinishedMessage(); - 
throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -785,7 +789,6 @@ public final class WALProtos { } private int bitField0_; - // optional bool has_compression = 1; private boolean hasCompression_ ; /** * optional bool has_compression = 1; @@ -818,7 +821,6 @@ public final class WALProtos { return this; } - // optional bytes encryption_key = 2; private com.google.protobuf.ByteString encryptionKey_ = com.google.protobuf.ByteString.EMPTY; /** * optional bytes encryption_key = 2; @@ -854,7 +856,6 @@ public final class WALProtos { return this; } - // optional bool has_tag_compression = 3; private boolean hasTagCompression_ ; /** * optional bool has_tag_compression = 3; @@ -887,7 +888,6 @@ public final class WALProtos { return this; } - // optional string writer_cls_name = 4; private java.lang.Object writerClsName_ = ""; /** * optional string writer_cls_name = 4; @@ -901,9 +901,12 @@ public final class WALProtos { public java.lang.String getWriterClsName() { java.lang.Object ref = writerClsName_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - writerClsName_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + writerClsName_ = s; + } return s; } else { return (java.lang.String) ref; @@ -961,7 +964,6 @@ public final class WALProtos { return this; } - // optional string cell_codec_cls_name = 5; private java.lang.Object cellCodecClsName_ = ""; /** * optional string cell_codec_cls_name = 5; @@ -975,9 +977,12 @@ public final class WALProtos { public java.lang.String getCellCodecClsName() { java.lang.Object ref = cellCodecClsName_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - cellCodecClsName_ = s; + com.google.protobuf.ByteString bs = + 
(com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + cellCodecClsName_ = s; + } return s; } else { return (java.lang.String) ref; @@ -1034,22 +1039,59 @@ public final class WALProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.WALHeader) } + // @@protoc_insertion_point(class_scope:hbase.pb.WALHeader) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader DEFAULT_INSTANCE; static { - defaultInstance = new WALHeader(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public WALHeader parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new WALHeader(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.WALHeader) } - public interface WALKeyOrBuilder - extends 
com.google.protobuf.MessageOrBuilder { + public interface WALKeyOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.WALKey) + com.google.protobuf.MessageOrBuilder { - // required bytes encoded_region_name = 1; /** * required bytes encoded_region_name = 1; */ @@ -1059,7 +1101,6 @@ public final class WALProtos { */ com.google.protobuf.ByteString getEncodedRegionName(); - // required bytes table_name = 2; /** * required bytes table_name = 2; */ @@ -1069,7 +1110,6 @@ public final class WALProtos { */ com.google.protobuf.ByteString getTableName(); - // required uint64 log_sequence_number = 3; /** * required uint64 log_sequence_number = 3; */ @@ -1079,7 +1119,6 @@ public final class WALProtos { */ long getLogSequenceNumber(); - // required uint64 write_time = 4; /** * required uint64 write_time = 4; */ @@ -1089,45 +1128,40 @@ public final class WALProtos { */ long getWriteTime(); - // optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; /** - * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; - * *
-     *
      *This parameter is deprecated in favor of clusters which
      *contains the list of clusters that have consumed the change.
      *It is retained so that the log created by earlier releases (0.94)
      *can be read by the newer releases.
      * 
+ * + * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; */ @java.lang.Deprecated boolean hasClusterId(); /** - * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; - * *
-     *
      *This parameter is deprecated in favor of clusters which
      *contains the list of clusters that have consumed the change.
      *It is retained so that the log created by earlier releases (0.94)
      *can be read by the newer releases.
      * 
+ * + * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; */ @java.lang.Deprecated org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID getClusterId(); /** - * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; - * *
-     *
      *This parameter is deprecated in favor of clusters which
      *contains the list of clusters that have consumed the change.
      *It is retained so that the log created by earlier releases (0.94)
      *can be read by the newer releases.
      * 
+ * + * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; */ @java.lang.Deprecated org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdOrBuilder(); - // repeated .hbase.pb.FamilyScope scopes = 6; /** * repeated .hbase.pb.FamilyScope scopes = 6; */ @@ -1152,7 +1186,6 @@ public final class WALProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScopeOrBuilder getScopesOrBuilder( int index); - // optional uint32 following_kv_count = 7; /** * optional uint32 following_kv_count = 7; */ @@ -1162,62 +1195,55 @@ public final class WALProtos { */ int getFollowingKvCount(); - // repeated .hbase.pb.UUID cluster_ids = 8; /** - * repeated .hbase.pb.UUID cluster_ids = 8; - * *
-     *
      *This field contains the list of clusters that have
      *consumed the change
      * 
+ * + * repeated .hbase.pb.UUID cluster_ids = 8; */ java.util.List getClusterIdsList(); /** - * repeated .hbase.pb.UUID cluster_ids = 8; - * *
-     *
      *This field contains the list of clusters that have
      *consumed the change
      * 
+ * + * repeated .hbase.pb.UUID cluster_ids = 8; */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID getClusterIds(int index); /** - * repeated .hbase.pb.UUID cluster_ids = 8; - * *
-     *
      *This field contains the list of clusters that have
      *consumed the change
      * 
+ * + * repeated .hbase.pb.UUID cluster_ids = 8; */ int getClusterIdsCount(); /** - * repeated .hbase.pb.UUID cluster_ids = 8; - * *
-     *
      *This field contains the list of clusters that have
      *consumed the change
      * 
+ * + * repeated .hbase.pb.UUID cluster_ids = 8; */ java.util.List getClusterIdsOrBuilderList(); /** - * repeated .hbase.pb.UUID cluster_ids = 8; - * *
-     *
      *This field contains the list of clusters that have
      *consumed the change
      * 
+ * + * repeated .hbase.pb.UUID cluster_ids = 8; */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdsOrBuilder( int index); - // optional uint64 nonceGroup = 9; /** * optional uint64 nonceGroup = 9; */ @@ -1227,7 +1253,6 @@ public final class WALProtos { */ long getNonceGroup(); - // optional uint64 nonce = 10; /** * optional uint64 nonce = 10; */ @@ -1237,7 +1262,6 @@ public final class WALProtos { */ long getNonce(); - // optional uint64 orig_sequence_number = 11; /** * optional uint64 orig_sequence_number = 11; */ @@ -1248,44 +1272,44 @@ public final class WALProtos { long getOrigSequenceNumber(); } /** - * Protobuf type {@code hbase.pb.WALKey} - * *
-   *
    * Protocol buffer version of WALKey; see WALKey comment, not really a key but WALEdit header
    * for some KVs
    * 
+ * + * Protobuf type {@code hbase.pb.WALKey} */ - public static final class WALKey extends - com.google.protobuf.GeneratedMessage - implements WALKeyOrBuilder { + public static final class WALKey extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.WALKey) + WALKeyOrBuilder { // Use WALKey.newBuilder() to construct. - private WALKey(com.google.protobuf.GeneratedMessage.Builder builder) { + private WALKey(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private WALKey(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final WALKey defaultInstance; - public static WALKey getDefaultInstance() { - return defaultInstance; } - - public WALKey getDefaultInstanceForType() { - return defaultInstance; + private WALKey() { + encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; + tableName_ = com.google.protobuf.ByteString.EMPTY; + logSequenceNumber_ = 0L; + writeTime_ = 0L; + scopes_ = java.util.Collections.emptyList(); + followingKvCount_ = 0; + clusterIds_ = java.util.Collections.emptyList(); + nonceGroup_ = 0L; + nonce_ = 0L; + origSequenceNumber_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private WALKey( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -1342,7 +1366,8 @@ public final class WALProtos { scopes_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000020; } - 
scopes_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.PARSER, extensionRegistry)); + scopes_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.PARSER, extensionRegistry)); break; } case 56: { @@ -1355,7 +1380,8 @@ public final class WALProtos { clusterIds_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000080; } - clusterIds_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.PARSER, extensionRegistry)); + clusterIds_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.PARSER, extensionRegistry)); break; } case 72: { @@ -1379,7 +1405,7 @@ public final class WALProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) { scopes_ = java.util.Collections.unmodifiableList(scopes_); @@ -1396,30 +1422,14 @@ public final class WALProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public WALKey parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return new WALKey(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bytes encoded_region_name = 1; public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString encodedRegionName_; /** @@ -1435,7 +1445,6 @@ public final class WALProtos { return encodedRegionName_; } - // required bytes table_name = 2; public static final int TABLE_NAME_FIELD_NUMBER = 2; private com.google.protobuf.ByteString tableName_; /** @@ -1451,7 +1460,6 @@ public final class WALProtos { return tableName_; } - // required uint64 log_sequence_number = 3; public static final int LOG_SEQUENCE_NUMBER_FIELD_NUMBER = 3; private long logSequenceNumber_; /** @@ -1467,7 +1475,6 @@ public final class WALProtos { return logSequenceNumber_; } - // required uint64 write_time = 4; public static final int WRITE_TIME_FIELD_NUMBER = 4; private long writeTime_; /** @@ -1483,53 +1490,48 @@ public final class WALProtos { return writeTime_; } - // optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; public static final int CLUSTER_ID_FIELD_NUMBER = 5; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID clusterId_; /** - * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; - * *
-     *
      *This parameter is deprecated in favor of clusters which
      *contains the list of clusters that have consumed the change.
      *It is retained so that the log created by earlier releases (0.94)
      *can be read by the newer releases.
      * 
+ * + * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; */ @java.lang.Deprecated public boolean hasClusterId() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** - * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; - * *
-     *
      *This parameter is deprecated in favor of clusters which
      *contains the list of clusters that have consumed the change.
      *It is retained so that the log created by earlier releases (0.94)
      *can be read by the newer releases.
      * 
+ * + * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; */ @java.lang.Deprecated public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID getClusterId() { - return clusterId_; + return clusterId_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance() : clusterId_; } /** - * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; - * *
-     *
      *This parameter is deprecated in favor of clusters which
      *contains the list of clusters that have consumed the change.
      *It is retained so that the log created by earlier releases (0.94)
      *can be read by the newer releases.
      * 
+ * + * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; */ @java.lang.Deprecated public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdOrBuilder() { - return clusterId_; + return clusterId_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance() : clusterId_; } - // repeated .hbase.pb.FamilyScope scopes = 6; public static final int SCOPES_FIELD_NUMBER = 6; private java.util.List scopes_; /** @@ -1565,7 +1567,6 @@ public final class WALProtos { return scopes_.get(index); } - // optional uint32 following_kv_count = 7; public static final int FOLLOWING_KV_COUNT_FIELD_NUMBER = 7; private int followingKvCount_; /** @@ -1581,73 +1582,66 @@ public final class WALProtos { return followingKvCount_; } - // repeated .hbase.pb.UUID cluster_ids = 8; public static final int CLUSTER_IDS_FIELD_NUMBER = 8; private java.util.List clusterIds_; /** - * repeated .hbase.pb.UUID cluster_ids = 8; - * *
-     *
      *This field contains the list of clusters that have
      *consumed the change
      * 
+ * + * repeated .hbase.pb.UUID cluster_ids = 8; */ public java.util.List getClusterIdsList() { return clusterIds_; } /** - * repeated .hbase.pb.UUID cluster_ids = 8; - * *
-     *
      *This field contains the list of clusters that have
      *consumed the change
      * 
+ * + * repeated .hbase.pb.UUID cluster_ids = 8; */ public java.util.List getClusterIdsOrBuilderList() { return clusterIds_; } /** - * repeated .hbase.pb.UUID cluster_ids = 8; - * *
-     *
      *This field contains the list of clusters that have
      *consumed the change
      * 
+ * + * repeated .hbase.pb.UUID cluster_ids = 8; */ public int getClusterIdsCount() { return clusterIds_.size(); } /** - * repeated .hbase.pb.UUID cluster_ids = 8; - * *
-     *
      *This field contains the list of clusters that have
      *consumed the change
      * 
+ * + * repeated .hbase.pb.UUID cluster_ids = 8; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID getClusterIds(int index) { return clusterIds_.get(index); } /** - * repeated .hbase.pb.UUID cluster_ids = 8; - * *
-     *
      *This field contains the list of clusters that have
      *consumed the change
      * 
+ * + * repeated .hbase.pb.UUID cluster_ids = 8; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdsOrBuilder( int index) { return clusterIds_.get(index); } - // optional uint64 nonceGroup = 9; public static final int NONCEGROUP_FIELD_NUMBER = 9; private long nonceGroup_; /** @@ -1663,7 +1657,6 @@ public final class WALProtos { return nonceGroup_; } - // optional uint64 nonce = 10; public static final int NONCE_FIELD_NUMBER = 10; private long nonce_; /** @@ -1679,7 +1672,6 @@ public final class WALProtos { return nonce_; } - // optional uint64 orig_sequence_number = 11; public static final int ORIG_SEQUENCE_NUMBER_FIELD_NUMBER = 11; private long origSequenceNumber_; /** @@ -1695,23 +1687,11 @@ public final class WALProtos { return origSequenceNumber_; } - private void initFields() { - encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; - tableName_ = com.google.protobuf.ByteString.EMPTY; - logSequenceNumber_ = 0L; - writeTime_ = 0L; - clusterId_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance(); - scopes_ = java.util.Collections.emptyList(); - followingKvCount_ = 0; - clusterIds_ = java.util.Collections.emptyList(); - nonceGroup_ = 0L; - nonce_ = 0L; - origSequenceNumber_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasEncodedRegionName()) { memoizedIsInitialized = 0; @@ -1753,7 +1733,6 @@ public final class WALProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, encodedRegionName_); } @@ -1767,7 +1746,7 @@ public final class WALProtos { output.writeUInt64(4, writeTime_); } if (((bitField0_ & 0x00000010) == 
0x00000010)) { - output.writeMessage(5, clusterId_); + output.writeMessage(5, getClusterId()); } for (int i = 0; i < scopes_.size(); i++) { output.writeMessage(6, scopes_.get(i)); @@ -1787,12 +1766,11 @@ public final class WALProtos { if (((bitField0_ & 0x00000100) == 0x00000100)) { output.writeUInt64(11, origSequenceNumber_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -1814,7 +1792,7 @@ public final class WALProtos { } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(5, clusterId_); + .computeMessageSize(5, getClusterId()); } for (int i = 0; i < scopes_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -1840,19 +1818,13 @@ public final class WALProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(11, origSequenceNumber_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -1912,12 +1884,10 @@ public final class WALProtos { result = result && (getOrigSequenceNumber() == other.getOrigSequenceNumber()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -1935,11 +1905,13 @@ public final class WALProtos { } if 
(hasLogSequenceNumber()) { hash = (37 * hash) + LOG_SEQUENCE_NUMBER_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLogSequenceNumber()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getLogSequenceNumber()); } if (hasWriteTime()) { hash = (37 * hash) + WRITE_TIME_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getWriteTime()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getWriteTime()); } if (hasClusterId()) { hash = (37 * hash) + CLUSTER_ID_FIELD_NUMBER; @@ -1959,17 +1931,20 @@ public final class WALProtos { } if (hasNonceGroup()) { hash = (37 * hash) + NONCEGROUP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonceGroup()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonceGroup()); } if (hasNonce()) { hash = (37 * hash) + NONCE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonce()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNonce()); } if (hasOrigSequenceNumber()) { hash = (37 * hash) + ORIG_SEQUENCE_NUMBER_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getOrigSequenceNumber()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getOrigSequenceNumber()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -1997,67 +1972,78 @@ public final class WALProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, 
extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.WALKey} - * *
-     *
      * Protocol buffer version of WALKey; see WALKey comment, not really a key but WALEdit header
      * for some KVs
      * 
+ * + * Protobuf type {@code hbase.pb.WALKey} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKeyOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.WALKey) + org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKeyOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -2070,21 +2056,18 @@ public final class WALProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getClusterIdFieldBuilder(); getScopesFieldBuilder(); getClusterIdsFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; @@ -2096,7 +2079,7 @@ public final class WALProtos { writeTime_ = 0L; bitField0_ = (bitField0_ & ~0x00000008); if (clusterIdBuilder_ == null) { - clusterId_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance(); + clusterId_ = null; } else { clusterIdBuilder_.clear(); } @@ -2124,10 +2107,6 @@ public final class 
WALProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_descriptor; @@ -2212,6 +2191,32 @@ public final class WALProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey)other); @@ -2257,7 +2262,7 @@ public final class WALProtos { scopes_ = other.scopes_; bitField0_ = (bitField0_ & ~0x00000020); scopesBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getScopesFieldBuilder() : null; } else { scopesBuilder_.addAllMessages(other.scopes_); @@ -2286,7 +2291,7 @@ public final class WALProtos { clusterIds_ = other.clusterIds_; bitField0_ = (bitField0_ & ~0x00000080); clusterIdsBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getClusterIdsFieldBuilder() : null; } else { clusterIdsBuilder_.addAllMessages(other.clusterIds_); @@ -2302,42 +2307,36 @@ public final class WALProtos { if (other.hasOrigSequenceNumber()) { setOrigSequenceNumber(other.getOrigSequenceNumber()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasEncodedRegionName()) { - return false; } if (!hasTableName()) { - return false; } if (!hasLogSequenceNumber()) { - return false; } if (!hasWriteTime()) { - return false; } if (hasClusterId()) { if (!getClusterId().isInitialized()) { - return false; } } for (int i = 0; i < getScopesCount(); i++) { if (!getScopes(i).isInitialized()) { - return false; } } for (int i = 0; i < getClusterIdsCount(); i++) { if (!getClusterIds(i).isInitialized()) { - return false; } } @@ -2353,7 +2352,7 @@ public final class WALProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -2363,7 +2362,6 @@ public final class WALProtos { } private int bitField0_; - // required bytes encoded_region_name = 1; private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes encoded_region_name = 1; @@ -2399,7 +2397,6 @@ public final class WALProtos { return this; } - 
// required bytes table_name = 2; private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes table_name = 2; @@ -2435,7 +2432,6 @@ public final class WALProtos { return this; } - // required uint64 log_sequence_number = 3; private long logSequenceNumber_ ; /** * required uint64 log_sequence_number = 3; @@ -2468,7 +2464,6 @@ public final class WALProtos { return this; } - // required uint64 write_time = 4; private long writeTime_ ; /** * required uint64 write_time = 4; @@ -2501,52 +2496,48 @@ public final class WALProtos { return this; } - // optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID clusterId_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID clusterId_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder> clusterIdBuilder_; /** - * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; - * *
-       *
        *This parameter is deprecated in favor of clusters which
        *contains the list of clusters that have consumed the change.
        *It is retained so that the log created by earlier releases (0.94)
        *can be read by the newer releases.
        * 
+ * + * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; */ @java.lang.Deprecated public boolean hasClusterId() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** - * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; - * *
-       *
        *This parameter is deprecated in favor of clusters which
        *contains the list of clusters that have consumed the change.
        *It is retained so that the log created by earlier releases (0.94)
        *can be read by the newer releases.
        * 
+ * + * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; */ @java.lang.Deprecated public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID getClusterId() { if (clusterIdBuilder_ == null) { - return clusterId_; + return clusterId_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance() : clusterId_; } else { return clusterIdBuilder_.getMessage(); } } /** - * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; - * *
-       *
        *This parameter is deprecated in favor of clusters which
        *contains the list of clusters that have consumed the change.
        *It is retained so that the log created by earlier releases (0.94)
        *can be read by the newer releases.
        * 
+ * + * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; */ @java.lang.Deprecated public Builder setClusterId(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID value) { if (clusterIdBuilder_ == null) { @@ -2562,15 +2553,14 @@ public final class WALProtos { return this; } /** - * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; - * *
-       *
        *This parameter is deprecated in favor of clusters which
        *contains the list of clusters that have consumed the change.
        *It is retained so that the log created by earlier releases (0.94)
        *can be read by the newer releases.
        * 
+ * + * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; */ @java.lang.Deprecated public Builder setClusterId( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder builderForValue) { @@ -2584,19 +2574,19 @@ public final class WALProtos { return this; } /** - * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; - * *
-       *
        *This parameter is deprecated in favor of clusters which
        *contains the list of clusters that have consumed the change.
        *It is retained so that the log created by earlier releases (0.94)
        *can be read by the newer releases.
        * 
+ * + * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; */ @java.lang.Deprecated public Builder mergeClusterId(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID value) { if (clusterIdBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010) && + clusterId_ != null && clusterId_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance()) { clusterId_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.newBuilder(clusterId_).mergeFrom(value).buildPartial(); @@ -2611,19 +2601,18 @@ public final class WALProtos { return this; } /** - * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; - * *
-       *
        *This parameter is deprecated in favor of clusters which
        *contains the list of clusters that have consumed the change.
        *It is retained so that the log created by earlier releases (0.94)
        *can be read by the newer releases.
        * 
+ * + * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; */ @java.lang.Deprecated public Builder clearClusterId() { if (clusterIdBuilder_ == null) { - clusterId_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance(); + clusterId_ = null; onChanged(); } else { clusterIdBuilder_.clear(); @@ -2632,15 +2621,14 @@ public final class WALProtos { return this; } /** - * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; - * *
-       *
        *This parameter is deprecated in favor of clusters which
        *contains the list of clusters that have consumed the change.
        *It is retained so that the log created by earlier releases (0.94)
        *can be read by the newer releases.
        * 
+ * + * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; */ @java.lang.Deprecated public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder getClusterIdBuilder() { bitField0_ |= 0x00000010; @@ -2648,41 +2636,40 @@ public final class WALProtos { return getClusterIdFieldBuilder().getBuilder(); } /** - * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; - * *
-       *
        *This parameter is deprecated in favor of clusters which
        *contains the list of clusters that have consumed the change.
        *It is retained so that the log created by earlier releases (0.94)
        *can be read by the newer releases.
        * 
+ * + * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; */ @java.lang.Deprecated public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdOrBuilder() { if (clusterIdBuilder_ != null) { return clusterIdBuilder_.getMessageOrBuilder(); } else { - return clusterId_; + return clusterId_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance() : clusterId_; } } /** - * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; - * *
-       *
        *This parameter is deprecated in favor of clusters which
        *contains the list of clusters that have consumed the change.
        *It is retained so that the log created by earlier releases (0.94)
        *can be read by the newer releases.
        * 
+ * + * optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder> getClusterIdFieldBuilder() { if (clusterIdBuilder_ == null) { - clusterIdBuilder_ = new com.google.protobuf.SingleFieldBuilder< + clusterIdBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder>( - clusterId_, + getClusterId(), getParentForChildren(), isClean()); clusterId_ = null; @@ -2690,7 +2677,6 @@ public final class WALProtos { return clusterIdBuilder_; } - // repeated .hbase.pb.FamilyScope scopes = 6; private java.util.List scopes_ = java.util.Collections.emptyList(); private void ensureScopesIsMutable() { @@ -2700,7 +2686,7 @@ public final class WALProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScopeOrBuilder> scopesBuilder_; /** @@ -2832,7 +2818,8 @@ public final class WALProtos { java.lang.Iterable values) { if (scopesBuilder_ == null) { ensureScopesIsMutable(); - super.addAll(values, scopes_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, scopes_); onChanged(); } else { scopesBuilder_.addAllMessages(values); @@ -2915,11 +2902,11 @@ public final class WALProtos { getScopesBuilderList() { return getScopesFieldBuilder().getBuilderList(); } - 
private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScopeOrBuilder> getScopesFieldBuilder() { if (scopesBuilder_ == null) { - scopesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + scopesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScopeOrBuilder>( scopes_, ((bitField0_ & 0x00000020) == 0x00000020), @@ -2930,7 +2917,6 @@ public final class WALProtos { return scopesBuilder_; } - // optional uint32 following_kv_count = 7; private int followingKvCount_ ; /** * optional uint32 following_kv_count = 7; @@ -2963,7 +2949,6 @@ public final class WALProtos { return this; } - // repeated .hbase.pb.UUID cluster_ids = 8; private java.util.List clusterIds_ = java.util.Collections.emptyList(); private void ensureClusterIdsIsMutable() { @@ -2973,17 +2958,16 @@ public final class WALProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder> clusterIdsBuilder_; /** - * repeated .hbase.pb.UUID cluster_ids = 8; - * *
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * 
+ * + * repeated .hbase.pb.UUID cluster_ids = 8; */ public java.util.List getClusterIdsList() { if (clusterIdsBuilder_ == null) { @@ -2993,13 +2977,12 @@ public final class WALProtos { } } /** - * repeated .hbase.pb.UUID cluster_ids = 8; - * *
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * 
+ * + * repeated .hbase.pb.UUID cluster_ids = 8; */ public int getClusterIdsCount() { if (clusterIdsBuilder_ == null) { @@ -3009,13 +2992,12 @@ public final class WALProtos { } } /** - * repeated .hbase.pb.UUID cluster_ids = 8; - * *
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * 
+ * + * repeated .hbase.pb.UUID cluster_ids = 8; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID getClusterIds(int index) { if (clusterIdsBuilder_ == null) { @@ -3025,13 +3007,12 @@ public final class WALProtos { } } /** - * repeated .hbase.pb.UUID cluster_ids = 8; - * *
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * 
+ * + * repeated .hbase.pb.UUID cluster_ids = 8; */ public Builder setClusterIds( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID value) { @@ -3048,13 +3029,12 @@ public final class WALProtos { return this; } /** - * repeated .hbase.pb.UUID cluster_ids = 8; - * *
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * 
+ * + * repeated .hbase.pb.UUID cluster_ids = 8; */ public Builder setClusterIds( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder builderForValue) { @@ -3068,13 +3048,12 @@ public final class WALProtos { return this; } /** - * repeated .hbase.pb.UUID cluster_ids = 8; - * *
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * 
+ * + * repeated .hbase.pb.UUID cluster_ids = 8; */ public Builder addClusterIds(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID value) { if (clusterIdsBuilder_ == null) { @@ -3090,13 +3069,12 @@ public final class WALProtos { return this; } /** - * repeated .hbase.pb.UUID cluster_ids = 8; - * *
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * 
+ * + * repeated .hbase.pb.UUID cluster_ids = 8; */ public Builder addClusterIds( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID value) { @@ -3113,13 +3091,12 @@ public final class WALProtos { return this; } /** - * repeated .hbase.pb.UUID cluster_ids = 8; - * *
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * 
+ * + * repeated .hbase.pb.UUID cluster_ids = 8; */ public Builder addClusterIds( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder builderForValue) { @@ -3133,13 +3110,12 @@ public final class WALProtos { return this; } /** - * repeated .hbase.pb.UUID cluster_ids = 8; - * *
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * 
+ * + * repeated .hbase.pb.UUID cluster_ids = 8; */ public Builder addClusterIds( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder builderForValue) { @@ -3153,19 +3129,19 @@ public final class WALProtos { return this; } /** - * repeated .hbase.pb.UUID cluster_ids = 8; - * *
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * 
+ * + * repeated .hbase.pb.UUID cluster_ids = 8; */ public Builder addAllClusterIds( java.lang.Iterable values) { if (clusterIdsBuilder_ == null) { ensureClusterIdsIsMutable(); - super.addAll(values, clusterIds_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, clusterIds_); onChanged(); } else { clusterIdsBuilder_.addAllMessages(values); @@ -3173,13 +3149,12 @@ public final class WALProtos { return this; } /** - * repeated .hbase.pb.UUID cluster_ids = 8; - * *
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * 
+ * + * repeated .hbase.pb.UUID cluster_ids = 8; */ public Builder clearClusterIds() { if (clusterIdsBuilder_ == null) { @@ -3192,13 +3167,12 @@ public final class WALProtos { return this; } /** - * repeated .hbase.pb.UUID cluster_ids = 8; - * *
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * 
+ * + * repeated .hbase.pb.UUID cluster_ids = 8; */ public Builder removeClusterIds(int index) { if (clusterIdsBuilder_ == null) { @@ -3211,26 +3185,24 @@ public final class WALProtos { return this; } /** - * repeated .hbase.pb.UUID cluster_ids = 8; - * *
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * 
+ * + * repeated .hbase.pb.UUID cluster_ids = 8; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder getClusterIdsBuilder( int index) { return getClusterIdsFieldBuilder().getBuilder(index); } /** - * repeated .hbase.pb.UUID cluster_ids = 8; - * *
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * 
+ * + * repeated .hbase.pb.UUID cluster_ids = 8; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdsOrBuilder( int index) { @@ -3240,13 +3212,12 @@ public final class WALProtos { } } /** - * repeated .hbase.pb.UUID cluster_ids = 8; - * *
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * 
+ * + * repeated .hbase.pb.UUID cluster_ids = 8; */ public java.util.List getClusterIdsOrBuilderList() { @@ -3257,26 +3228,24 @@ public final class WALProtos { } } /** - * repeated .hbase.pb.UUID cluster_ids = 8; - * *
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * 
+ * + * repeated .hbase.pb.UUID cluster_ids = 8; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder addClusterIdsBuilder() { return getClusterIdsFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance()); } /** - * repeated .hbase.pb.UUID cluster_ids = 8; - * *
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * 
+ * + * repeated .hbase.pb.UUID cluster_ids = 8; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder addClusterIdsBuilder( int index) { @@ -3284,23 +3253,22 @@ public final class WALProtos { index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance()); } /** - * repeated .hbase.pb.UUID cluster_ids = 8; - * *
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * 
+ * + * repeated .hbase.pb.UUID cluster_ids = 8; */ public java.util.List getClusterIdsBuilderList() { return getClusterIdsFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder> getClusterIdsFieldBuilder() { if (clusterIdsBuilder_ == null) { - clusterIdsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + clusterIdsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder>( clusterIds_, ((bitField0_ & 0x00000080) == 0x00000080), @@ -3311,7 +3279,6 @@ public final class WALProtos { return clusterIdsBuilder_; } - // optional uint64 nonceGroup = 9; private long nonceGroup_ ; /** * optional uint64 nonceGroup = 9; @@ -3344,7 +3311,6 @@ public final class WALProtos { return this; } - // optional uint64 nonce = 10; private long nonce_ ; /** * optional uint64 nonce = 10; @@ -3377,7 +3343,6 @@ public final class WALProtos { return this; } - // optional uint64 orig_sequence_number = 11; private long origSequenceNumber_ ; /** * optional uint64 orig_sequence_number = 11; @@ -3409,22 +3374,59 @@ public final class WALProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.WALKey) } + // 
@@protoc_insertion_point(class_scope:hbase.pb.WALKey) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey DEFAULT_INSTANCE; static { - defaultInstance = new WALKey(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public WALKey parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new WALKey(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.WALKey) } - public interface FamilyScopeOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface FamilyScopeOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.FamilyScope) + com.google.protobuf.MessageOrBuilder { - // required bytes family = 1; /** * required bytes family = 1; */ @@ -3434,7 +3436,6 @@ public final class WALProtos { */ com.google.protobuf.ByteString getFamily(); - // required .hbase.pb.ScopeType scope_type = 2; /** * required .hbase.pb.ScopeType scope_type = 2; */ @@ -3447,36 +3448,29 @@ public final class WALProtos { /** * Protobuf type {@code hbase.pb.FamilyScope} */ - public static final class FamilyScope extends - com.google.protobuf.GeneratedMessage - implements FamilyScopeOrBuilder { + 
public static final class FamilyScope extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.FamilyScope) + FamilyScopeOrBuilder { // Use FamilyScope.newBuilder() to construct. - private FamilyScope(com.google.protobuf.GeneratedMessage.Builder builder) { + private FamilyScope(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private FamilyScope(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final FamilyScope defaultInstance; - public static FamilyScope getDefaultInstance() { - return defaultInstance; } - - public FamilyScope getDefaultInstanceForType() { - return defaultInstance; + private FamilyScope() { + family_ = com.google.protobuf.ByteString.EMPTY; + scopeType_ = 0; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private FamilyScope( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -3507,7 +3501,7 @@ public final class WALProtos { unknownFields.mergeVarintField(2, rawValue); } else { bitField0_ |= 0x00000002; - scopeType_ = value; + scopeType_ = rawValue; } break; } @@ -3517,7 +3511,7 @@ public final class WALProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ 
-3528,30 +3522,14 @@ public final class WALProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FamilyScope_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FamilyScope_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public FamilyScope parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new FamilyScope(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bytes family = 1; public static final int FAMILY_FIELD_NUMBER = 1; private com.google.protobuf.ByteString family_; /** @@ -3567,9 +3545,8 @@ public final class WALProtos { return family_; } - // required .hbase.pb.ScopeType scope_type = 2; public static final int SCOPE_TYPE_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.ScopeType scopeType_; + private int scopeType_; /** * required .hbase.pb.ScopeType scope_type = 2; */ @@ -3580,17 +3557,15 @@ public final class WALProtos { * required .hbase.pb.ScopeType scope_type = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.ScopeType getScopeType() { - return scopeType_; + org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.ScopeType result = 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.ScopeType.valueOf(scopeType_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.ScopeType.REPLICATION_SCOPE_LOCAL : result; } - private void initFields() { - family_ = com.google.protobuf.ByteString.EMPTY; - scopeType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.ScopeType.REPLICATION_SCOPE_LOCAL; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasFamily()) { memoizedIsInitialized = 0; @@ -3606,19 +3581,17 @@ public final class WALProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, family_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeEnum(2, scopeType_.getNumber()); + output.writeEnum(2, scopeType_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -3628,21 +3601,15 @@ public final class WALProtos { } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(2, scopeType_.getNumber()); + .computeEnumSize(2, scopeType_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean 
equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -3660,15 +3627,12 @@ public final class WALProtos { } result = result && (hasScopeType() == other.hasScopeType()); if (hasScopeType()) { - result = result && - (getScopeType() == other.getScopeType()); + result = result && scopeType_ == other.scopeType_; } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -3682,9 +3646,9 @@ public final class WALProtos { } if (hasScopeType()) { hash = (37 * hash) + SCOPE_TYPE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getScopeType()); + hash = (53 * hash) + scopeType_; } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -3712,46 +3676,57 @@ public final class WALProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -3759,14 +3734,15 @@ public final class WALProtos { * Protobuf type {@code hbase.pb.FamilyScope} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScopeOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.FamilyScope) + org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScopeOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FamilyScope_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FamilyScope_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -3779,31 +3755,24 @@ public final class WALProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); family_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = 
(bitField0_ & ~0x00000001); - scopeType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.ScopeType.REPLICATION_SCOPE_LOCAL; + scopeType_ = 0; bitField0_ = (bitField0_ & ~0x00000002); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FamilyScope_descriptor; @@ -3838,6 +3807,32 @@ public final class WALProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope)other); @@ -3855,17 +3850,16 @@ public final class WALProtos { if (other.hasScopeType()) { setScopeType(other.getScopeType()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasFamily()) { - return false; } if 
(!hasScopeType()) { - return false; } return true; @@ -3880,7 +3874,7 @@ public final class WALProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -3890,7 +3884,6 @@ public final class WALProtos { } private int bitField0_; - // required bytes family = 1; private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes family = 1; @@ -3926,8 +3919,7 @@ public final class WALProtos { return this; } - // required .hbase.pb.ScopeType scope_type = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.ScopeType scopeType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.ScopeType.REPLICATION_SCOPE_LOCAL; + private int scopeType_ = 0; /** * required .hbase.pb.ScopeType scope_type = 2; */ @@ -3938,7 +3930,8 @@ public final class WALProtos { * required .hbase.pb.ScopeType scope_type = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.ScopeType getScopeType() { - return scopeType_; + org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.ScopeType result = org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.ScopeType.valueOf(scopeType_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.ScopeType.REPLICATION_SCOPE_LOCAL : result; } /** * required .hbase.pb.ScopeType scope_type = 2; @@ -3948,7 +3941,7 @@ public final class WALProtos { throw new NullPointerException(); } bitField0_ |= 0x00000002; - scopeType_ = value; + scopeType_ = value.getNumber(); onChanged(); return this; } @@ -3957,44 +3950,80 @@ public final class WALProtos { */ public Builder clearScopeType() { bitField0_ = (bitField0_ & ~0x00000002); - scopeType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.ScopeType.REPLICATION_SCOPE_LOCAL; + scopeType_ = 0; onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.FamilyScope) } + // @@protoc_insertion_point(class_scope:hbase.pb.FamilyScope) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope DEFAULT_INSTANCE; static { - defaultInstance = new FamilyScope(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public FamilyScope parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FamilyScope(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return 
PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.FamilyScope) } - public interface CompactionDescriptorOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface CompactionDescriptorOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.CompactionDescriptor) + com.google.protobuf.MessageOrBuilder { - // required bytes table_name = 1; /** - * required bytes table_name = 1; - * *
      * TODO: WALKey already stores these, might remove
      * 
+ * + * required bytes table_name = 1; */ boolean hasTableName(); /** - * required bytes table_name = 1; - * *
      * TODO: WALKey already stores these, might remove
      * 
+ * + * required bytes table_name = 1; */ com.google.protobuf.ByteString getTableName(); - // required bytes encoded_region_name = 2; /** * required bytes encoded_region_name = 2; */ @@ -4004,7 +4033,6 @@ public final class WALProtos { */ com.google.protobuf.ByteString getEncodedRegionName(); - // required bytes family_name = 3; /** * required bytes family_name = 3; */ @@ -4014,48 +4042,46 @@ public final class WALProtos { */ com.google.protobuf.ByteString getFamilyName(); - // repeated string compaction_input = 4; /** - * repeated string compaction_input = 4; - * *
      * relative to store dir
      * 
+ * + * repeated string compaction_input = 4; */ java.util.List - getCompactionInputList(); + getCompactionInputList(); /** - * repeated string compaction_input = 4; - * *
      * relative to store dir
      * 
+ * + * repeated string compaction_input = 4; */ int getCompactionInputCount(); /** - * repeated string compaction_input = 4; - * *
      * relative to store dir
      * 
+ * + * repeated string compaction_input = 4; */ java.lang.String getCompactionInput(int index); /** - * repeated string compaction_input = 4; - * *
      * relative to store dir
      * 
+ * + * repeated string compaction_input = 4; */ com.google.protobuf.ByteString getCompactionInputBytes(int index); - // repeated string compaction_output = 5; /** * repeated string compaction_output = 5; */ java.util.List - getCompactionOutputList(); + getCompactionOutputList(); /** * repeated string compaction_output = 5; */ @@ -4070,54 +4096,50 @@ public final class WALProtos { com.google.protobuf.ByteString getCompactionOutputBytes(int index); - // required string store_home_dir = 6; /** - * required string store_home_dir = 6; - * *
      * relative to region dir
      * 
+ * + * required string store_home_dir = 6; */ boolean hasStoreHomeDir(); /** - * required string store_home_dir = 6; - * *
      * relative to region dir
      * 
+ * + * required string store_home_dir = 6; */ java.lang.String getStoreHomeDir(); /** - * required string store_home_dir = 6; - * *
      * relative to region dir
      * 
+ * + * required string store_home_dir = 6; */ com.google.protobuf.ByteString getStoreHomeDirBytes(); - // optional bytes region_name = 7; /** - * optional bytes region_name = 7; - * *
      * full region name
      * 
+ * + * optional bytes region_name = 7; */ boolean hasRegionName(); /** - * optional bytes region_name = 7; - * *
      * full region name
      * 
+ * + * optional bytes region_name = 7; */ com.google.protobuf.ByteString getRegionName(); } /** - * Protobuf type {@code hbase.pb.CompactionDescriptor} - * *
    **
    * Special WAL entry to hold all related to a compaction.
@@ -4125,37 +4147,37 @@ public final class WALProtos {
    * sufficient info in the below message to complete later
    * the * compaction should we fail the WAL write.
    * 
+ * + * Protobuf type {@code hbase.pb.CompactionDescriptor} */ - public static final class CompactionDescriptor extends - com.google.protobuf.GeneratedMessage - implements CompactionDescriptorOrBuilder { + public static final class CompactionDescriptor extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.CompactionDescriptor) + CompactionDescriptorOrBuilder { // Use CompactionDescriptor.newBuilder() to construct. - private CompactionDescriptor(com.google.protobuf.GeneratedMessage.Builder builder) { + private CompactionDescriptor(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private CompactionDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final CompactionDescriptor defaultInstance; - public static CompactionDescriptor getDefaultInstance() { - return defaultInstance; + private CompactionDescriptor() { + tableName_ = com.google.protobuf.ByteString.EMPTY; + encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; + familyName_ = com.google.protobuf.ByteString.EMPTY; + compactionInput_ = com.google.protobuf.LazyStringArrayList.EMPTY; + compactionOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY; + storeHomeDir_ = ""; + regionName_ = com.google.protobuf.ByteString.EMPTY; } - public CompactionDescriptor getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private CompactionDescriptor( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; 
com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -4190,24 +4212,27 @@ public final class WALProtos { break; } case 34: { + com.google.protobuf.ByteString bs = input.readBytes(); if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) { compactionInput_ = new com.google.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000008; } - compactionInput_.add(input.readBytes()); + compactionInput_.add(bs); break; } case 42: { + com.google.protobuf.ByteString bs = input.readBytes(); if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { compactionOutput_ = new com.google.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000010; } - compactionOutput_.add(input.readBytes()); + compactionOutput_.add(bs); break; } case 50: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000008; - storeHomeDir_ = input.readBytes(); + storeHomeDir_ = bs; break; } case 58: { @@ -4221,13 +4246,13 @@ public final class WALProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { - compactionInput_ = new com.google.protobuf.UnmodifiableLazyStringList(compactionInput_); + compactionInput_ = compactionInput_.getUnmodifiableView(); } if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) { - compactionOutput_ = new com.google.protobuf.UnmodifiableLazyStringList(compactionOutput_); + compactionOutput_ = compactionOutput_.getUnmodifiableView(); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -4238,54 +4263,37 @@ public final class WALProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_CompactionDescriptor_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + 
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_CompactionDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CompactionDescriptor parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CompactionDescriptor(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bytes table_name = 1; public static final int TABLE_NAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString tableName_; /** - * required bytes table_name = 1; - * *
      * TODO: WALKey already stores these, might remove
      * 
+ * + * required bytes table_name = 1; */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required bytes table_name = 1; - * *
      * TODO: WALKey already stores these, might remove
      * 
+ * + * required bytes table_name = 1; */ public com.google.protobuf.ByteString getTableName() { return tableName_; } - // required bytes encoded_region_name = 2; public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 2; private com.google.protobuf.ByteString encodedRegionName_; /** @@ -4301,7 +4309,6 @@ public final class WALProtos { return encodedRegionName_; } - // required bytes family_name = 3; public static final int FAMILY_NAME_FIELD_NUMBER = 3; private com.google.protobuf.ByteString familyName_; /** @@ -4317,59 +4324,57 @@ public final class WALProtos { return familyName_; } - // repeated string compaction_input = 4; public static final int COMPACTION_INPUT_FIELD_NUMBER = 4; private com.google.protobuf.LazyStringList compactionInput_; /** - * repeated string compaction_input = 4; - * *
      * relative to store dir
      * 
+ * + * repeated string compaction_input = 4; */ - public java.util.List + public com.google.protobuf.ProtocolStringList getCompactionInputList() { return compactionInput_; } /** - * repeated string compaction_input = 4; - * *
      * relative to store dir
      * 
+ * + * repeated string compaction_input = 4; */ public int getCompactionInputCount() { return compactionInput_.size(); } /** - * repeated string compaction_input = 4; - * *
      * relative to store dir
      * 
+ * + * repeated string compaction_input = 4; */ public java.lang.String getCompactionInput(int index) { return compactionInput_.get(index); } /** - * repeated string compaction_input = 4; - * *
      * relative to store dir
      * 
+ * + * repeated string compaction_input = 4; */ public com.google.protobuf.ByteString getCompactionInputBytes(int index) { return compactionInput_.getByteString(index); } - // repeated string compaction_output = 5; public static final int COMPACTION_OUTPUT_FIELD_NUMBER = 5; private com.google.protobuf.LazyStringList compactionOutput_; /** * repeated string compaction_output = 5; */ - public java.util.List + public com.google.protobuf.ProtocolStringList getCompactionOutputList() { return compactionOutput_; } @@ -4393,25 +4398,24 @@ public final class WALProtos { return compactionOutput_.getByteString(index); } - // required string store_home_dir = 6; public static final int STORE_HOME_DIR_FIELD_NUMBER = 6; - private java.lang.Object storeHomeDir_; + private volatile java.lang.Object storeHomeDir_; /** - * required string store_home_dir = 6; - * *
      * relative to region dir
      * 
+ * + * required string store_home_dir = 6; */ public boolean hasStoreHomeDir() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** - * required string store_home_dir = 6; - * *
      * relative to region dir
      * 
+ * + * required string store_home_dir = 6; */ public java.lang.String getStoreHomeDir() { java.lang.Object ref = storeHomeDir_; @@ -4428,11 +4432,11 @@ public final class WALProtos { } } /** - * required string store_home_dir = 6; - * *
      * relative to region dir
      * 
+ * + * required string store_home_dir = 6; */ public com.google.protobuf.ByteString getStoreHomeDirBytes() { @@ -4448,43 +4452,34 @@ public final class WALProtos { } } - // optional bytes region_name = 7; public static final int REGION_NAME_FIELD_NUMBER = 7; private com.google.protobuf.ByteString regionName_; /** - * optional bytes region_name = 7; - * *
      * full region name
      * 
+ * + * optional bytes region_name = 7; */ public boolean hasRegionName() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** - * optional bytes region_name = 7; - * *
      * full region name
      * 
+ * + * optional bytes region_name = 7; */ public com.google.protobuf.ByteString getRegionName() { return regionName_; } - private void initFields() { - tableName_ = com.google.protobuf.ByteString.EMPTY; - encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; - familyName_ = com.google.protobuf.ByteString.EMPTY; - compactionInput_ = com.google.protobuf.LazyStringArrayList.EMPTY; - compactionOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY; - storeHomeDir_ = ""; - regionName_ = com.google.protobuf.ByteString.EMPTY; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasTableName()) { memoizedIsInitialized = 0; @@ -4508,7 +4503,6 @@ public final class WALProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, tableName_); } @@ -4519,23 +4513,22 @@ public final class WALProtos { output.writeBytes(3, familyName_); } for (int i = 0; i < compactionInput_.size(); i++) { - output.writeBytes(4, compactionInput_.getByteString(i)); + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, compactionInput_.getRaw(i)); } for (int i = 0; i < compactionOutput_.size(); i++) { - output.writeBytes(5, compactionOutput_.getByteString(i)); + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, compactionOutput_.getRaw(i)); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeBytes(6, getStoreHomeDirBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 6, storeHomeDir_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBytes(7, regionName_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int 
getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -4554,8 +4547,7 @@ public final class WALProtos { { int dataSize = 0; for (int i = 0; i < compactionInput_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(compactionInput_.getByteString(i)); + dataSize += computeStringSizeNoTag(compactionInput_.getRaw(i)); } size += dataSize; size += 1 * getCompactionInputList().size(); @@ -4563,33 +4555,25 @@ public final class WALProtos { { int dataSize = 0; for (int i = 0; i < compactionOutput_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(compactionOutput_.getByteString(i)); + dataSize += computeStringSizeNoTag(compactionOutput_.getRaw(i)); } size += dataSize; size += 1 * getCompactionOutputList().size(); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(6, getStoreHomeDirBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, storeHomeDir_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(7, regionName_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -4629,12 +4613,10 @@ public final class WALProtos { result = result && getRegionName() .equals(other.getRegionName()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int 
memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -4670,7 +4652,7 @@ public final class WALProtos { hash = (37 * hash) + REGION_NAME_FIELD_NUMBER; hash = (53 * hash) + getRegionName().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -4698,52 +4680,61 @@ public final class WALProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor parseFrom( com.google.protobuf.CodedInputStream input) throws 
java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.CompactionDescriptor} - * *
      **
      * Special WAL entry to hold all related to a compaction.
@@ -4751,16 +4742,19 @@ public final class WALProtos {
      * sufficient info in the below message to complete later
      * the * compaction should we fail the WAL write.
      * 
+ * + * Protobuf type {@code hbase.pb.CompactionDescriptor} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptorOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.CompactionDescriptor) + org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptorOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_CompactionDescriptor_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_CompactionDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -4773,18 +4767,15 @@ public final class WALProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); tableName_ = com.google.protobuf.ByteString.EMPTY; @@ -4804,10 +4795,6 @@ public final class WALProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_CompactionDescriptor_descriptor; @@ 
-4842,14 +4829,12 @@ public final class WALProtos { } result.familyName_ = familyName_; if (((bitField0_ & 0x00000008) == 0x00000008)) { - compactionInput_ = new com.google.protobuf.UnmodifiableLazyStringList( - compactionInput_); + compactionInput_ = compactionInput_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000008); } result.compactionInput_ = compactionInput_; if (((bitField0_ & 0x00000010) == 0x00000010)) { - compactionOutput_ = new com.google.protobuf.UnmodifiableLazyStringList( - compactionOutput_); + compactionOutput_ = compactionOutput_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000010); } result.compactionOutput_ = compactionOutput_; @@ -4866,6 +4851,32 @@ public final class WALProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor)other); @@ -4914,25 +4925,22 @@ public final class WALProtos { if (other.hasRegionName()) { setRegionName(other.getRegionName()); } - 
this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasTableName()) { - return false; } if (!hasEncodedRegionName()) { - return false; } if (!hasFamilyName()) { - return false; } if (!hasStoreHomeDir()) { - return false; } return true; @@ -4947,7 +4955,7 @@ public final class WALProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -4957,34 +4965,33 @@ public final class WALProtos { } private int bitField0_; - // required bytes table_name = 1; private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; /** - * required bytes table_name = 1; - * *
        * TODO: WALKey already stores these, might remove
        * 
+ * + * required bytes table_name = 1; */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required bytes table_name = 1; - * *
        * TODO: WALKey already stores these, might remove
        * 
+ * + * required bytes table_name = 1; */ public com.google.protobuf.ByteString getTableName() { return tableName_; } /** - * required bytes table_name = 1; - * *
        * TODO: WALKey already stores these, might remove
        * 
+ * + * required bytes table_name = 1; */ public Builder setTableName(com.google.protobuf.ByteString value) { if (value == null) { @@ -4996,11 +5003,11 @@ public final class WALProtos { return this; } /** - * required bytes table_name = 1; - * *
        * TODO: WALKey already stores these, might remove
        * 
+ * + * required bytes table_name = 1; */ public Builder clearTableName() { bitField0_ = (bitField0_ & ~0x00000001); @@ -5009,7 +5016,6 @@ public final class WALProtos { return this; } - // required bytes encoded_region_name = 2; private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes encoded_region_name = 2; @@ -5045,7 +5051,6 @@ public final class WALProtos { return this; } - // required bytes family_name = 3; private com.google.protobuf.ByteString familyName_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes family_name = 3; @@ -5081,7 +5086,6 @@ public final class WALProtos { return this; } - // repeated string compaction_input = 4; private com.google.protobuf.LazyStringList compactionInput_ = com.google.protobuf.LazyStringArrayList.EMPTY; private void ensureCompactionInputIsMutable() { if (!((bitField0_ & 0x00000008) == 0x00000008)) { @@ -5090,53 +5094,53 @@ public final class WALProtos { } } /** - * repeated string compaction_input = 4; - * *
        * relative to store dir
        * 
+ * + * repeated string compaction_input = 4; */ - public java.util.List + public com.google.protobuf.ProtocolStringList getCompactionInputList() { - return java.util.Collections.unmodifiableList(compactionInput_); + return compactionInput_.getUnmodifiableView(); } /** - * repeated string compaction_input = 4; - * *
        * relative to store dir
        * 
+ * + * repeated string compaction_input = 4; */ public int getCompactionInputCount() { return compactionInput_.size(); } /** - * repeated string compaction_input = 4; - * *
        * relative to store dir
        * 
+ * + * repeated string compaction_input = 4; */ public java.lang.String getCompactionInput(int index) { return compactionInput_.get(index); } /** - * repeated string compaction_input = 4; - * *
        * relative to store dir
        * 
+ * + * repeated string compaction_input = 4; */ public com.google.protobuf.ByteString getCompactionInputBytes(int index) { return compactionInput_.getByteString(index); } /** - * repeated string compaction_input = 4; - * *
        * relative to store dir
        * 
+ * + * repeated string compaction_input = 4; */ public Builder setCompactionInput( int index, java.lang.String value) { @@ -5149,11 +5153,11 @@ public final class WALProtos { return this; } /** - * repeated string compaction_input = 4; - * *
        * relative to store dir
        * 
+ * + * repeated string compaction_input = 4; */ public Builder addCompactionInput( java.lang.String value) { @@ -5166,25 +5170,26 @@ public final class WALProtos { return this; } /** - * repeated string compaction_input = 4; - * *
        * relative to store dir
        * 
+ * + * repeated string compaction_input = 4; */ public Builder addAllCompactionInput( java.lang.Iterable values) { ensureCompactionInputIsMutable(); - super.addAll(values, compactionInput_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, compactionInput_); onChanged(); return this; } /** - * repeated string compaction_input = 4; - * *
        * relative to store dir
        * 
+ * + * repeated string compaction_input = 4; */ public Builder clearCompactionInput() { compactionInput_ = com.google.protobuf.LazyStringArrayList.EMPTY; @@ -5193,11 +5198,11 @@ public final class WALProtos { return this; } /** - * repeated string compaction_input = 4; - * *
        * relative to store dir
        * 
+ * + * repeated string compaction_input = 4; */ public Builder addCompactionInputBytes( com.google.protobuf.ByteString value) { @@ -5210,7 +5215,6 @@ public final class WALProtos { return this; } - // repeated string compaction_output = 5; private com.google.protobuf.LazyStringList compactionOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY; private void ensureCompactionOutputIsMutable() { if (!((bitField0_ & 0x00000010) == 0x00000010)) { @@ -5221,9 +5225,9 @@ public final class WALProtos { /** * repeated string compaction_output = 5; */ - public java.util.List + public com.google.protobuf.ProtocolStringList getCompactionOutputList() { - return java.util.Collections.unmodifiableList(compactionOutput_); + return compactionOutput_.getUnmodifiableView(); } /** * repeated string compaction_output = 5; @@ -5276,7 +5280,8 @@ public final class WALProtos { public Builder addAllCompactionOutput( java.lang.Iterable values) { ensureCompactionOutputIsMutable(); - super.addAll(values, compactionOutput_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, compactionOutput_); onChanged(); return this; } @@ -5303,42 +5308,44 @@ public final class WALProtos { return this; } - // required string store_home_dir = 6; private java.lang.Object storeHomeDir_ = ""; /** - * required string store_home_dir = 6; - * *
        * relative to region dir
        * 
+ * + * required string store_home_dir = 6; */ public boolean hasStoreHomeDir() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** - * required string store_home_dir = 6; - * *
        * relative to region dir
        * 
+ * + * required string store_home_dir = 6; */ public java.lang.String getStoreHomeDir() { java.lang.Object ref = storeHomeDir_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - storeHomeDir_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + storeHomeDir_ = s; + } return s; } else { return (java.lang.String) ref; } } /** - * required string store_home_dir = 6; - * *
        * relative to region dir
        * 
+ * + * required string store_home_dir = 6; */ public com.google.protobuf.ByteString getStoreHomeDirBytes() { @@ -5354,11 +5361,11 @@ public final class WALProtos { } } /** - * required string store_home_dir = 6; - * *
        * relative to region dir
        * 
+ * + * required string store_home_dir = 6; */ public Builder setStoreHomeDir( java.lang.String value) { @@ -5371,11 +5378,11 @@ public final class WALProtos { return this; } /** - * required string store_home_dir = 6; - * *
        * relative to region dir
        * 
+ * + * required string store_home_dir = 6; */ public Builder clearStoreHomeDir() { bitField0_ = (bitField0_ & ~0x00000020); @@ -5384,11 +5391,11 @@ public final class WALProtos { return this; } /** - * required string store_home_dir = 6; - * *
        * relative to region dir
        * 
+ * + * required string store_home_dir = 6; */ public Builder setStoreHomeDirBytes( com.google.protobuf.ByteString value) { @@ -5401,34 +5408,33 @@ public final class WALProtos { return this; } - // optional bytes region_name = 7; private com.google.protobuf.ByteString regionName_ = com.google.protobuf.ByteString.EMPTY; /** - * optional bytes region_name = 7; - * *
        * full region name
        * 
+ * + * optional bytes region_name = 7; */ public boolean hasRegionName() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** - * optional bytes region_name = 7; - * *
        * full region name
        * 
+ * + * optional bytes region_name = 7; */ public com.google.protobuf.ByteString getRegionName() { return regionName_; } /** - * optional bytes region_name = 7; - * *
        * full region name
        * 
+ * + * optional bytes region_name = 7; */ public Builder setRegionName(com.google.protobuf.ByteString value) { if (value == null) { @@ -5440,11 +5446,11 @@ public final class WALProtos { return this; } /** - * optional bytes region_name = 7; - * *
        * full region name
        * 
+ * + * optional bytes region_name = 7; */ public Builder clearRegionName() { bitField0_ = (bitField0_ & ~0x00000040); @@ -5452,22 +5458,59 @@ public final class WALProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.CompactionDescriptor) } + // @@protoc_insertion_point(class_scope:hbase.pb.CompactionDescriptor) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor DEFAULT_INSTANCE; static { - defaultInstance = new CompactionDescriptor(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public CompactionDescriptor parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CompactionDescriptor(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.CompactionDescriptor) } - public interface 
FlushDescriptorOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface FlushDescriptorOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.FlushDescriptor) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.FlushDescriptor.FlushAction action = 1; /** * required .hbase.pb.FlushDescriptor.FlushAction action = 1; */ @@ -5477,7 +5520,6 @@ public final class WALProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction getAction(); - // required bytes table_name = 2; /** * required bytes table_name = 2; */ @@ -5487,7 +5529,6 @@ public final class WALProtos { */ com.google.protobuf.ByteString getTableName(); - // required bytes encoded_region_name = 3; /** * required bytes encoded_region_name = 3; */ @@ -5497,7 +5538,6 @@ public final class WALProtos { */ com.google.protobuf.ByteString getEncodedRegionName(); - // optional uint64 flush_sequence_number = 4; /** * optional uint64 flush_sequence_number = 4; */ @@ -5507,7 +5547,6 @@ public final class WALProtos { */ long getFlushSequenceNumber(); - // repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5; /** * repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5; */ @@ -5532,62 +5571,58 @@ public final class WALProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder getStoreFlushesOrBuilder( int index); - // optional bytes region_name = 6; /** - * optional bytes region_name = 6; - * *
      * full region name
      * 
+ * + * optional bytes region_name = 6; */ boolean hasRegionName(); /** - * optional bytes region_name = 6; - * *
      * full region name
      * 
+ * + * optional bytes region_name = 6; */ com.google.protobuf.ByteString getRegionName(); } /** - * Protobuf type {@code hbase.pb.FlushDescriptor} - * *
    **
    * Special WAL entry to hold all related to a flush.
    * 
+ * + * Protobuf type {@code hbase.pb.FlushDescriptor} */ - public static final class FlushDescriptor extends - com.google.protobuf.GeneratedMessage - implements FlushDescriptorOrBuilder { + public static final class FlushDescriptor extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.FlushDescriptor) + FlushDescriptorOrBuilder { // Use FlushDescriptor.newBuilder() to construct. - private FlushDescriptor(com.google.protobuf.GeneratedMessage.Builder builder) { + private FlushDescriptor(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private FlushDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final FlushDescriptor defaultInstance; - public static FlushDescriptor getDefaultInstance() { - return defaultInstance; - } - - public FlushDescriptor getDefaultInstanceForType() { - return defaultInstance; + private FlushDescriptor() { + action_ = 0; + tableName_ = com.google.protobuf.ByteString.EMPTY; + encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; + flushSequenceNumber_ = 0L; + storeFlushes_ = java.util.Collections.emptyList(); + regionName_ = com.google.protobuf.ByteString.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private FlushDescriptor( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -5613,7 +5648,7 @@ public final class WALProtos { unknownFields.mergeVarintField(1, rawValue); } else 
{ bitField0_ |= 0x00000001; - action_ = value; + action_ = rawValue; } break; } @@ -5637,7 +5672,8 @@ public final class WALProtos { storeFlushes_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000010; } - storeFlushes_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.PARSER, extensionRegistry)); + storeFlushes_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.PARSER, extensionRegistry)); break; } case 50: { @@ -5651,7 +5687,7 @@ public final class WALProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) { storeFlushes_ = java.util.Collections.unmodifiableList(storeFlushes_); @@ -5665,28 +5701,13 @@ public final class WALProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public FlushDescriptor parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new 
FlushDescriptor(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - /** * Protobuf enum {@code hbase.pb.FlushDescriptor.FlushAction} */ @@ -5695,23 +5716,23 @@ public final class WALProtos { /** * START_FLUSH = 0; */ - START_FLUSH(0, 0), + START_FLUSH(0), /** * COMMIT_FLUSH = 1; */ - COMMIT_FLUSH(1, 1), + COMMIT_FLUSH(1), /** * ABORT_FLUSH = 2; */ - ABORT_FLUSH(2, 2), + ABORT_FLUSH(2), /** - * CANNOT_FLUSH = 3; - * *
        * marker for indicating that a flush has been requested but cannot complete
        * 
+ * + * CANNOT_FLUSH = 3; */ - CANNOT_FLUSH(3, 3), + CANNOT_FLUSH(3), ; /** @@ -5727,18 +5748,28 @@ public final class WALProtos { */ public static final int ABORT_FLUSH_VALUE = 2; /** - * CANNOT_FLUSH = 3; - * *
        * marker for indicating that a flush has been requested but cannot complete
        * 
+ * + * CANNOT_FLUSH = 3; */ public static final int CANNOT_FLUSH_VALUE = 3; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static FlushAction valueOf(int value) { + return forNumber(value); + } + + public static FlushAction forNumber(int value) { switch (value) { case 0: return START_FLUSH; case 1: return COMMIT_FLUSH; @@ -5752,17 +5783,17 @@ public final class WALProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + FlushAction> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public FlushAction findValueByNumber(int number) { - return FlushAction.valueOf(number); + return FlushAction.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -5784,21 +5815,19 @@ public final class WALProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private FlushAction(int index, int value) { - this.index = index; + private FlushAction(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.FlushDescriptor.FlushAction) } - public interface StoreFlushDescriptorOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface StoreFlushDescriptorOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.FlushDescriptor.StoreFlushDescriptor) + com.google.protobuf.MessageOrBuilder { - // required bytes family_name = 1; /** * required bytes family_name = 1; */ @@ -5808,65 +5837,63 @@ public final class WALProtos { */ 
com.google.protobuf.ByteString getFamilyName(); - // required string store_home_dir = 2; /** - * required string store_home_dir = 2; - * *
        *relative to region dir
        * 
+ * + * required string store_home_dir = 2; */ boolean hasStoreHomeDir(); /** - * required string store_home_dir = 2; - * *
        *relative to region dir
        * 
+ * + * required string store_home_dir = 2; */ java.lang.String getStoreHomeDir(); /** - * required string store_home_dir = 2; - * *
        *relative to region dir
        * 
+ * + * required string store_home_dir = 2; */ com.google.protobuf.ByteString getStoreHomeDirBytes(); - // repeated string flush_output = 3; /** - * repeated string flush_output = 3; - * *
        * relative to store dir (if this is a COMMIT_FLUSH)
        * 
+ * + * repeated string flush_output = 3; */ java.util.List - getFlushOutputList(); + getFlushOutputList(); /** - * repeated string flush_output = 3; - * *
        * relative to store dir (if this is a COMMIT_FLUSH)
        * 
+ * + * repeated string flush_output = 3; */ int getFlushOutputCount(); /** - * repeated string flush_output = 3; - * *
        * relative to store dir (if this is a COMMIT_FLUSH)
        * 
+ * + * repeated string flush_output = 3; */ java.lang.String getFlushOutput(int index); /** - * repeated string flush_output = 3; - * *
        * relative to store dir (if this is a COMMIT_FLUSH)
        * 
+ * + * repeated string flush_output = 3; */ com.google.protobuf.ByteString getFlushOutputBytes(int index); @@ -5874,36 +5901,30 @@ public final class WALProtos { /** * Protobuf type {@code hbase.pb.FlushDescriptor.StoreFlushDescriptor} */ - public static final class StoreFlushDescriptor extends - com.google.protobuf.GeneratedMessage - implements StoreFlushDescriptorOrBuilder { + public static final class StoreFlushDescriptor extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.FlushDescriptor.StoreFlushDescriptor) + StoreFlushDescriptorOrBuilder { // Use StoreFlushDescriptor.newBuilder() to construct. - private StoreFlushDescriptor(com.google.protobuf.GeneratedMessage.Builder builder) { + private StoreFlushDescriptor(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private StoreFlushDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final StoreFlushDescriptor defaultInstance; - public static StoreFlushDescriptor getDefaultInstance() { - return defaultInstance; } - - public StoreFlushDescriptor getDefaultInstanceForType() { - return defaultInstance; + private StoreFlushDescriptor() { + familyName_ = com.google.protobuf.ByteString.EMPTY; + storeHomeDir_ = ""; + flushOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private StoreFlushDescriptor( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = 
com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -5928,16 +5949,18 @@ public final class WALProtos { break; } case 18: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; - storeHomeDir_ = input.readBytes(); + storeHomeDir_ = bs; break; } case 26: { + com.google.protobuf.ByteString bs = input.readBytes(); if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { flushOutput_ = new com.google.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000004; } - flushOutput_.add(input.readBytes()); + flushOutput_.add(bs); break; } } @@ -5946,10 +5969,10 @@ public final class WALProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { - flushOutput_ = new com.google.protobuf.UnmodifiableLazyStringList(flushOutput_); + flushOutput_ = flushOutput_.getUnmodifiableView(); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -5960,30 +5983,14 @@ public final class WALProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - 
public StoreFlushDescriptor parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new StoreFlushDescriptor(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder.class); } private int bitField0_; - // required bytes family_name = 1; public static final int FAMILY_NAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString familyName_; /** @@ -5999,25 +6006,24 @@ public final class WALProtos { return familyName_; } - // required string store_home_dir = 2; public static final int STORE_HOME_DIR_FIELD_NUMBER = 2; - private java.lang.Object storeHomeDir_; + private volatile java.lang.Object storeHomeDir_; /** - * required string store_home_dir = 2; - * *
        *relative to region dir
        * 
+ * + * required string store_home_dir = 2; */ public boolean hasStoreHomeDir() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * required string store_home_dir = 2; - * *
        *relative to region dir
        * 
+ * + * required string store_home_dir = 2; */ public java.lang.String getStoreHomeDir() { java.lang.Object ref = storeHomeDir_; @@ -6034,11 +6040,11 @@ public final class WALProtos { } } /** - * required string store_home_dir = 2; - * *
        *relative to region dir
        * 
+ * + * required string store_home_dir = 2; */ public com.google.protobuf.ByteString getStoreHomeDirBytes() { @@ -6054,61 +6060,56 @@ public final class WALProtos { } } - // repeated string flush_output = 3; public static final int FLUSH_OUTPUT_FIELD_NUMBER = 3; private com.google.protobuf.LazyStringList flushOutput_; /** - * repeated string flush_output = 3; - * *
        * relative to store dir (if this is a COMMIT_FLUSH)
        * 
+ * + * repeated string flush_output = 3; */ - public java.util.List + public com.google.protobuf.ProtocolStringList getFlushOutputList() { return flushOutput_; } /** - * repeated string flush_output = 3; - * *
        * relative to store dir (if this is a COMMIT_FLUSH)
        * 
+ * + * repeated string flush_output = 3; */ public int getFlushOutputCount() { return flushOutput_.size(); } /** - * repeated string flush_output = 3; - * *
        * relative to store dir (if this is a COMMIT_FLUSH)
        * 
+ * + * repeated string flush_output = 3; */ public java.lang.String getFlushOutput(int index) { return flushOutput_.get(index); } /** - * repeated string flush_output = 3; - * *
        * relative to store dir (if this is a COMMIT_FLUSH)
        * 
+ * + * repeated string flush_output = 3; */ public com.google.protobuf.ByteString getFlushOutputBytes(int index) { return flushOutput_.getByteString(index); } - private void initFields() { - familyName_ = com.google.protobuf.ByteString.EMPTY; - storeHomeDir_ = ""; - flushOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasFamilyName()) { memoizedIsInitialized = 0; @@ -6124,22 +6125,20 @@ public final class WALProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, familyName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getStoreHomeDirBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, storeHomeDir_); } for (int i = 0; i < flushOutput_.size(); i++) { - output.writeBytes(3, flushOutput_.getByteString(i)); + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, flushOutput_.getRaw(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -6148,31 +6147,23 @@ public final class WALProtos { .computeBytesSize(1, familyName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getStoreHomeDirBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, storeHomeDir_); } { int dataSize = 0; for (int i = 0; i < flushOutput_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - 
.computeBytesSizeNoTag(flushOutput_.getByteString(i)); + dataSize += computeStringSizeNoTag(flushOutput_.getRaw(i)); } size += dataSize; size += 1 * getFlushOutputList().size(); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -6195,12 +6186,10 @@ public final class WALProtos { } result = result && getFlushOutputList() .equals(other.getFlushOutputList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -6220,7 +6209,7 @@ public final class WALProtos { hash = (37 * hash) + FLUSH_OUTPUT_FIELD_NUMBER; hash = (53 * hash) + getFlushOutputList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -6248,46 +6237,57 @@ public final class WALProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + 
return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor prototype) { - return newBuilder().mergeFrom(prototype); + 
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -6295,14 +6295,15 @@ public final class WALProtos { * Protobuf type {@code hbase.pb.FlushDescriptor.StoreFlushDescriptor} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.FlushDescriptor.StoreFlushDescriptor) + org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -6315,18 +6316,15 @@ public final class WALProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if 
(com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); familyName_ = com.google.protobuf.ByteString.EMPTY; @@ -6338,10 +6336,6 @@ public final class WALProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_descriptor; @@ -6372,8 +6366,7 @@ public final class WALProtos { } result.storeHomeDir_ = storeHomeDir_; if (((bitField0_ & 0x00000004) == 0x00000004)) { - flushOutput_ = new com.google.protobuf.UnmodifiableLazyStringList( - flushOutput_); + flushOutput_ = flushOutput_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000004); } result.flushOutput_ = flushOutput_; @@ -6382,6 +6375,32 @@ public final class WALProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message 
other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor)other); @@ -6411,17 +6430,16 @@ public final class WALProtos { } onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasFamilyName()) { - return false; } if (!hasStoreHomeDir()) { - return false; } return true; @@ -6436,7 +6454,7 @@ public final class WALProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -6446,7 +6464,6 @@ public final class WALProtos { } private int bitField0_; - // required bytes family_name = 1; private com.google.protobuf.ByteString familyName_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes family_name = 1; @@ -6482,42 +6499,44 @@ public final class WALProtos { return this; } - // required string store_home_dir = 2; private java.lang.Object storeHomeDir_ = ""; /** - * required string store_home_dir = 2; - * *
          *relative to region dir
          * 
+ * + * required string store_home_dir = 2; */ public boolean hasStoreHomeDir() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * required string store_home_dir = 2; - * *
          *relative to region dir
          * 
+ * + * required string store_home_dir = 2; */ public java.lang.String getStoreHomeDir() { java.lang.Object ref = storeHomeDir_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - storeHomeDir_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + storeHomeDir_ = s; + } return s; } else { return (java.lang.String) ref; } } /** - * required string store_home_dir = 2; - * *
          *relative to region dir
          * 
+ * + * required string store_home_dir = 2; */ public com.google.protobuf.ByteString getStoreHomeDirBytes() { @@ -6533,11 +6552,11 @@ public final class WALProtos { } } /** - * required string store_home_dir = 2; - * *
          *relative to region dir
          * 
+ * + * required string store_home_dir = 2; */ public Builder setStoreHomeDir( java.lang.String value) { @@ -6550,11 +6569,11 @@ public final class WALProtos { return this; } /** - * required string store_home_dir = 2; - * *
          *relative to region dir
          * 
+ * + * required string store_home_dir = 2; */ public Builder clearStoreHomeDir() { bitField0_ = (bitField0_ & ~0x00000002); @@ -6563,11 +6582,11 @@ public final class WALProtos { return this; } /** - * required string store_home_dir = 2; - * *
          *relative to region dir
          * 
+ * + * required string store_home_dir = 2; */ public Builder setStoreHomeDirBytes( com.google.protobuf.ByteString value) { @@ -6580,7 +6599,6 @@ public final class WALProtos { return this; } - // repeated string flush_output = 3; private com.google.protobuf.LazyStringList flushOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY; private void ensureFlushOutputIsMutable() { if (!((bitField0_ & 0x00000004) == 0x00000004)) { @@ -6589,53 +6607,53 @@ public final class WALProtos { } } /** - * repeated string flush_output = 3; - * *
          * relative to store dir (if this is a COMMIT_FLUSH)
          * 
+ * + * repeated string flush_output = 3; */ - public java.util.List + public com.google.protobuf.ProtocolStringList getFlushOutputList() { - return java.util.Collections.unmodifiableList(flushOutput_); + return flushOutput_.getUnmodifiableView(); } /** - * repeated string flush_output = 3; - * *
          * relative to store dir (if this is a COMMIT_FLUSH)
          * 
+ * + * repeated string flush_output = 3; */ public int getFlushOutputCount() { return flushOutput_.size(); } /** - * repeated string flush_output = 3; - * *
          * relative to store dir (if this is a COMMIT_FLUSH)
          * 
+ * + * repeated string flush_output = 3; */ public java.lang.String getFlushOutput(int index) { return flushOutput_.get(index); } /** - * repeated string flush_output = 3; - * *
          * relative to store dir (if this is a COMMIT_FLUSH)
          * 
+ * + * repeated string flush_output = 3; */ public com.google.protobuf.ByteString getFlushOutputBytes(int index) { return flushOutput_.getByteString(index); } /** - * repeated string flush_output = 3; - * *
          * relative to store dir (if this is a COMMIT_FLUSH)
          * 
+ * + * repeated string flush_output = 3; */ public Builder setFlushOutput( int index, java.lang.String value) { @@ -6648,11 +6666,11 @@ public final class WALProtos { return this; } /** - * repeated string flush_output = 3; - * *
          * relative to store dir (if this is a COMMIT_FLUSH)
          * 
+ * + * repeated string flush_output = 3; */ public Builder addFlushOutput( java.lang.String value) { @@ -6665,25 +6683,26 @@ public final class WALProtos { return this; } /** - * repeated string flush_output = 3; - * *
          * relative to store dir (if this is a COMMIT_FLUSH)
          * 
+ * + * repeated string flush_output = 3; */ public Builder addAllFlushOutput( java.lang.Iterable values) { ensureFlushOutputIsMutable(); - super.addAll(values, flushOutput_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, flushOutput_); onChanged(); return this; } /** - * repeated string flush_output = 3; - * *
          * relative to store dir (if this is a COMMIT_FLUSH)
          * 
+ * + * repeated string flush_output = 3; */ public Builder clearFlushOutput() { flushOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY; @@ -6692,11 +6711,11 @@ public final class WALProtos { return this; } /** - * repeated string flush_output = 3; - * *
          * relative to store dir (if this is a COMMIT_FLUSH)
          * 
+ * + * repeated string flush_output = 3; */ public Builder addFlushOutputBytes( com.google.protobuf.ByteString value) { @@ -6708,22 +6727,58 @@ public final class WALProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.FlushDescriptor.StoreFlushDescriptor) } + // @@protoc_insertion_point(class_scope:hbase.pb.FlushDescriptor.StoreFlushDescriptor) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor DEFAULT_INSTANCE; static { - defaultInstance = new StoreFlushDescriptor(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public StoreFlushDescriptor parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new StoreFlushDescriptor(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // 
@@protoc_insertion_point(class_scope:hbase.pb.FlushDescriptor.StoreFlushDescriptor) } private int bitField0_; - // required .hbase.pb.FlushDescriptor.FlushAction action = 1; public static final int ACTION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction action_; + private int action_; /** * required .hbase.pb.FlushDescriptor.FlushAction action = 1; */ @@ -6734,10 +6789,10 @@ public final class WALProtos { * required .hbase.pb.FlushDescriptor.FlushAction action = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction getAction() { - return action_; + org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction result = org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction.valueOf(action_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction.START_FLUSH : result; } - // required bytes table_name = 2; public static final int TABLE_NAME_FIELD_NUMBER = 2; private com.google.protobuf.ByteString tableName_; /** @@ -6753,7 +6808,6 @@ public final class WALProtos { return tableName_; } - // required bytes encoded_region_name = 3; public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 3; private com.google.protobuf.ByteString encodedRegionName_; /** @@ -6769,7 +6823,6 @@ public final class WALProtos { return encodedRegionName_; } - // optional uint64 flush_sequence_number = 4; public static final int FLUSH_SEQUENCE_NUMBER_FIELD_NUMBER = 4; private long flushSequenceNumber_; /** @@ -6785,7 +6838,6 @@ public final class WALProtos { return flushSequenceNumber_; } - // repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5; public static final int STORE_FLUSHES_FIELD_NUMBER = 5; private java.util.List storeFlushes_; /** @@ -6821,42 +6873,34 @@ public final class WALProtos { return storeFlushes_.get(index); } - // optional 
bytes region_name = 6; public static final int REGION_NAME_FIELD_NUMBER = 6; private com.google.protobuf.ByteString regionName_; /** - * optional bytes region_name = 6; - * *
      * full region name
      * 
+ * + * optional bytes region_name = 6; */ public boolean hasRegionName() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** - * optional bytes region_name = 6; - * *
      * full region name
      * 
+ * + * optional bytes region_name = 6; */ public com.google.protobuf.ByteString getRegionName() { return regionName_; } - private void initFields() { - action_ = org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction.START_FLUSH; - tableName_ = com.google.protobuf.ByteString.EMPTY; - encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; - flushSequenceNumber_ = 0L; - storeFlushes_ = java.util.Collections.emptyList(); - regionName_ = com.google.protobuf.ByteString.EMPTY; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasAction()) { memoizedIsInitialized = 0; @@ -6882,9 +6926,8 @@ public final class WALProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeEnum(1, action_.getNumber()); + output.writeEnum(1, action_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, tableName_); @@ -6901,18 +6944,17 @@ public final class WALProtos { if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBytes(6, regionName_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(1, action_.getNumber()); + .computeEnumSize(1, action_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream @@ -6934,19 +6976,13 @@ public final class WALProtos { size += com.google.protobuf.CodedOutputStream .computeBytesSize(6, regionName_); 
} - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -6959,8 +6995,7 @@ public final class WALProtos { boolean result = true; result = result && (hasAction() == other.hasAction()); if (hasAction()) { - result = result && - (getAction() == other.getAction()); + result = result && action_ == other.action_; } result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { @@ -6984,12 +7019,10 @@ public final class WALProtos { result = result && getRegionName() .equals(other.getRegionName()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -6999,7 +7032,7 @@ public final class WALProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasAction()) { hash = (37 * hash) + ACTION_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getAction()); + hash = (53 * hash) + action_; } if (hasTableName()) { hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; @@ -7011,7 +7044,8 @@ public final class WALProtos { } if (hasFlushSequenceNumber()) { hash = (37 * hash) + FLUSH_SEQUENCE_NUMBER_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getFlushSequenceNumber()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getFlushSequenceNumber()); } if (getStoreFlushesCount() > 0) { hash = (37 * hash) + STORE_FLUSHES_FIELD_NUMBER; @@ -7021,7 +7055,7 @@ public final class WALProtos { hash = (37 * hash) + REGION_NAME_FIELD_NUMBER; hash = 
(53 * hash) + getRegionName().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -7049,66 +7083,78 @@ public final class WALProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.FlushDescriptor} - * *
      **
      * Special WAL entry to hold all related to a flush.
      * 
+ * + * Protobuf type {@code hbase.pb.FlushDescriptor} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptorOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.FlushDescriptor) + org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptorOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -7121,22 +7167,19 @@ public final class WALProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getStoreFlushesFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); - action_ = org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction.START_FLUSH; + action_ = 0; bitField0_ = (bitField0_ & ~0x00000001); tableName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); @@ -7155,10 +7198,6 @@ public final class WALProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } 
- public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_descriptor; @@ -7214,6 +7253,32 @@ public final class WALProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor)other); @@ -7256,7 +7321,7 @@ public final class WALProtos { storeFlushes_ = other.storeFlushes_; bitField0_ = (bitField0_ & ~0x00000010); storeFlushesBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getStoreFlushesFieldBuilder() : null; } else { storeFlushesBuilder_.addAllMessages(other.storeFlushes_); @@ -7266,26 +7331,23 @@ public final class WALProtos { if (other.hasRegionName()) { setRegionName(other.getRegionName()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasAction()) { - return false; } if (!hasTableName()) { - return false; } if (!hasEncodedRegionName()) { - return false; } for (int i = 0; i < getStoreFlushesCount(); i++) { if (!getStoreFlushes(i).isInitialized()) { - return false; } } @@ -7301,7 +7363,7 @@ public final class WALProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -7311,8 +7373,7 @@ public final class WALProtos { } private int bitField0_; - // required .hbase.pb.FlushDescriptor.FlushAction action = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction action_ = org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction.START_FLUSH; + private int action_ = 0; /** * required .hbase.pb.FlushDescriptor.FlushAction action = 1; */ @@ -7323,7 +7384,8 @@ public final class WALProtos { * required .hbase.pb.FlushDescriptor.FlushAction action = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction getAction() { - return action_; + org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction result = org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction.valueOf(action_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction.START_FLUSH : result; } /** * required .hbase.pb.FlushDescriptor.FlushAction action = 1; @@ -7333,7 +7395,7 @@ public final class WALProtos { throw new NullPointerException(); } bitField0_ |= 0x00000001; - action_ = value; + action_ = value.getNumber(); onChanged(); return this; } @@ -7342,12 +7404,11 @@ public final class WALProtos { */ public Builder clearAction() { bitField0_ = (bitField0_ & ~0x00000001); - action_ = org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction.START_FLUSH; + action_ = 0; onChanged(); return this; } - // required bytes table_name = 2; private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes table_name = 2; @@ -7383,7 +7444,6 @@ public final class WALProtos { return this; } - // required bytes encoded_region_name = 3; private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes encoded_region_name = 3; @@ -7419,7 +7479,6 @@ public final class WALProtos { return this; } - // optional uint64 flush_sequence_number = 4; private long flushSequenceNumber_ ; /** * optional uint64 flush_sequence_number = 4; @@ -7452,7 +7511,6 @@ public final class WALProtos { return this; } - // repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5; private java.util.List storeFlushes_ = java.util.Collections.emptyList(); private void ensureStoreFlushesIsMutable() { @@ -7462,7 +7520,7 @@ public final class WALProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder> storeFlushesBuilder_; /** @@ -7594,7 +7652,8 @@ public final class WALProtos { java.lang.Iterable values) { if (storeFlushesBuilder_ == null) { ensureStoreFlushesIsMutable(); - super.addAll(values, storeFlushes_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, storeFlushes_); onChanged(); } else { storeFlushesBuilder_.addAllMessages(values); @@ -7677,11 +7736,11 @@ public final class WALProtos { getStoreFlushesBuilderList() { return getStoreFlushesFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder> getStoreFlushesFieldBuilder() { if (storeFlushesBuilder_ == null) { - storeFlushesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + storeFlushesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder>( storeFlushes_, ((bitField0_ & 0x00000010) == 0x00000010), @@ -7692,34 +7751,33 @@ public final class WALProtos { return storeFlushesBuilder_; } - // optional bytes region_name = 6; private com.google.protobuf.ByteString regionName_ = com.google.protobuf.ByteString.EMPTY; /** - * optional bytes region_name = 6; - * *
        * full region name
        * 
+ * + * optional bytes region_name = 6; */ public boolean hasRegionName() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** - * optional bytes region_name = 6; - * *
        * full region name
        * 
+ * + * optional bytes region_name = 6; */ public com.google.protobuf.ByteString getRegionName() { return regionName_; } /** - * optional bytes region_name = 6; - * *
        * full region name
        * 
+ * + * optional bytes region_name = 6; */ public Builder setRegionName(com.google.protobuf.ByteString value) { if (value == null) { @@ -7731,11 +7789,11 @@ public final class WALProtos { return this; } /** - * optional bytes region_name = 6; - * *
        * full region name
        * 
+ * + * optional bytes region_name = 6; */ public Builder clearRegionName() { bitField0_ = (bitField0_ & ~0x00000020); @@ -7743,22 +7801,59 @@ public final class WALProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.FlushDescriptor) } + // @@protoc_insertion_point(class_scope:hbase.pb.FlushDescriptor) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor DEFAULT_INSTANCE; static { - defaultInstance = new FlushDescriptor(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public FlushDescriptor parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FlushDescriptor(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.FlushDescriptor) } - public interface StoreDescriptorOrBuilder - extends 
com.google.protobuf.MessageOrBuilder { + public interface StoreDescriptorOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.StoreDescriptor) + com.google.protobuf.MessageOrBuilder { - // required bytes family_name = 1; /** * required bytes family_name = 1; */ @@ -7768,120 +7863,112 @@ public final class WALProtos { */ com.google.protobuf.ByteString getFamilyName(); - // required string store_home_dir = 2; /** - * required string store_home_dir = 2; - * *
      *relative to region dir
      * 
+ * + * required string store_home_dir = 2; */ boolean hasStoreHomeDir(); /** - * required string store_home_dir = 2; - * *
      *relative to region dir
      * 
+ * + * required string store_home_dir = 2; */ java.lang.String getStoreHomeDir(); /** - * required string store_home_dir = 2; - * *
      *relative to region dir
      * 
+ * + * required string store_home_dir = 2; */ com.google.protobuf.ByteString getStoreHomeDirBytes(); - // repeated string store_file = 3; /** - * repeated string store_file = 3; - * *
      * relative to store dir
      * 
+ * + * repeated string store_file = 3; */ java.util.List - getStoreFileList(); + getStoreFileList(); /** - * repeated string store_file = 3; - * *
      * relative to store dir
      * 
+ * + * repeated string store_file = 3; */ int getStoreFileCount(); /** - * repeated string store_file = 3; - * *
      * relative to store dir
      * 
+ * + * repeated string store_file = 3; */ java.lang.String getStoreFile(int index); /** - * repeated string store_file = 3; - * *
      * relative to store dir
      * 
+ * + * repeated string store_file = 3; */ com.google.protobuf.ByteString getStoreFileBytes(int index); - // optional uint64 store_file_size_bytes = 4; /** - * optional uint64 store_file_size_bytes = 4; - * *
      * size of store file
      * 
+ * + * optional uint64 store_file_size_bytes = 4; */ boolean hasStoreFileSizeBytes(); /** - * optional uint64 store_file_size_bytes = 4; - * *
      * size of store file
      * 
+ * + * optional uint64 store_file_size_bytes = 4; */ long getStoreFileSizeBytes(); } /** * Protobuf type {@code hbase.pb.StoreDescriptor} */ - public static final class StoreDescriptor extends - com.google.protobuf.GeneratedMessage - implements StoreDescriptorOrBuilder { + public static final class StoreDescriptor extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.StoreDescriptor) + StoreDescriptorOrBuilder { // Use StoreDescriptor.newBuilder() to construct. - private StoreDescriptor(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private StoreDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final StoreDescriptor defaultInstance; - public static StoreDescriptor getDefaultInstance() { - return defaultInstance; + private StoreDescriptor(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); } - - public StoreDescriptor getDefaultInstanceForType() { - return defaultInstance; + private StoreDescriptor() { + familyName_ = com.google.protobuf.ByteString.EMPTY; + storeHomeDir_ = ""; + storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; + storeFileSizeBytes_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private StoreDescriptor( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -7906,16 +7993,18 @@ public final class WALProtos { break; } case 18: { + 
com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; - storeHomeDir_ = input.readBytes(); + storeHomeDir_ = bs; break; } case 26: { + com.google.protobuf.ByteString bs = input.readBytes(); if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { storeFile_ = new com.google.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000004; } - storeFile_.add(input.readBytes()); + storeFile_.add(bs); break; } case 32: { @@ -7929,10 +8018,10 @@ public final class WALProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { - storeFile_ = new com.google.protobuf.UnmodifiableLazyStringList(storeFile_); + storeFile_ = storeFile_.getUnmodifiableView(); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -7943,30 +8032,14 @@ public final class WALProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_StoreDescriptor_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_StoreDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public StoreDescriptor parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new 
StoreDescriptor(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required bytes family_name = 1; public static final int FAMILY_NAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString familyName_; /** @@ -7982,25 +8055,24 @@ public final class WALProtos { return familyName_; } - // required string store_home_dir = 2; public static final int STORE_HOME_DIR_FIELD_NUMBER = 2; - private java.lang.Object storeHomeDir_; + private volatile java.lang.Object storeHomeDir_; /** - * required string store_home_dir = 2; - * *
      *relative to region dir
      * 
+ * + * required string store_home_dir = 2; */ public boolean hasStoreHomeDir() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * required string store_home_dir = 2; - * *
      *relative to region dir
      * 
+ * + * required string store_home_dir = 2; */ public java.lang.String getStoreHomeDir() { java.lang.Object ref = storeHomeDir_; @@ -8017,11 +8089,11 @@ public final class WALProtos { } } /** - * required string store_home_dir = 2; - * *
      *relative to region dir
      * 
+ * + * required string store_home_dir = 2; */ public com.google.protobuf.ByteString getStoreHomeDirBytes() { @@ -8037,86 +8109,79 @@ public final class WALProtos { } } - // repeated string store_file = 3; public static final int STORE_FILE_FIELD_NUMBER = 3; private com.google.protobuf.LazyStringList storeFile_; /** - * repeated string store_file = 3; - * *
      * relative to store dir
      * 
+ * + * repeated string store_file = 3; */ - public java.util.List + public com.google.protobuf.ProtocolStringList getStoreFileList() { return storeFile_; } /** - * repeated string store_file = 3; - * *
      * relative to store dir
      * 
+ * + * repeated string store_file = 3; */ public int getStoreFileCount() { return storeFile_.size(); } /** - * repeated string store_file = 3; - * *
      * relative to store dir
      * 
+ * + * repeated string store_file = 3; */ public java.lang.String getStoreFile(int index) { return storeFile_.get(index); } /** - * repeated string store_file = 3; - * *
      * relative to store dir
      * 
+ * + * repeated string store_file = 3; */ public com.google.protobuf.ByteString getStoreFileBytes(int index) { return storeFile_.getByteString(index); } - // optional uint64 store_file_size_bytes = 4; public static final int STORE_FILE_SIZE_BYTES_FIELD_NUMBER = 4; private long storeFileSizeBytes_; /** - * optional uint64 store_file_size_bytes = 4; - * *
      * size of store file
      * 
+ * + * optional uint64 store_file_size_bytes = 4; */ public boolean hasStoreFileSizeBytes() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * optional uint64 store_file_size_bytes = 4; - * *
      * size of store file
      * 
+ * + * optional uint64 store_file_size_bytes = 4; */ public long getStoreFileSizeBytes() { return storeFileSizeBytes_; } - private void initFields() { - familyName_ = com.google.protobuf.ByteString.EMPTY; - storeHomeDir_ = ""; - storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; - storeFileSizeBytes_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasFamilyName()) { memoizedIsInitialized = 0; @@ -8132,25 +8197,23 @@ public final class WALProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, familyName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getStoreHomeDirBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, storeHomeDir_); } for (int i = 0; i < storeFile_.size(); i++) { - output.writeBytes(3, storeFile_.getByteString(i)); + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, storeFile_.getRaw(i)); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeUInt64(4, storeFileSizeBytes_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -8159,14 +8222,12 @@ public final class WALProtos { .computeBytesSize(1, familyName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getStoreHomeDirBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, storeHomeDir_); } { int dataSize = 0; for (int i = 0; i < storeFile_.size(); i++) { - 
dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(storeFile_.getByteString(i)); + dataSize += computeStringSizeNoTag(storeFile_.getRaw(i)); } size += dataSize; size += 1 * getStoreFileList().size(); @@ -8175,19 +8236,13 @@ public final class WALProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(4, storeFileSizeBytes_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -8215,12 +8270,10 @@ public final class WALProtos { result = result && (getStoreFileSizeBytes() == other.getStoreFileSizeBytes()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -8242,9 +8295,10 @@ public final class WALProtos { } if (hasStoreFileSizeBytes()) { hash = (37 * hash) + STORE_FILE_SIZE_BYTES_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getStoreFileSizeBytes()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getStoreFileSizeBytes()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -8272,46 +8326,57 @@ public final class WALProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, 
input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return 
DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -8319,14 +8384,15 @@ public final class WALProtos { * Protobuf type {@code hbase.pb.StoreDescriptor} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptorOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.StoreDescriptor) + org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptorOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_StoreDescriptor_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_StoreDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -8339,18 +8405,15 @@ public final class WALProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private 
void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); familyName_ = com.google.protobuf.ByteString.EMPTY; @@ -8364,10 +8427,6 @@ public final class WALProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_StoreDescriptor_descriptor; @@ -8398,8 +8457,7 @@ public final class WALProtos { } result.storeHomeDir_ = storeHomeDir_; if (((bitField0_ & 0x00000004) == 0x00000004)) { - storeFile_ = new com.google.protobuf.UnmodifiableLazyStringList( - storeFile_); + storeFile_ = storeFile_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000004); } result.storeFile_ = storeFile_; @@ -8412,6 +8470,32 @@ public final class WALProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder 
mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor)other); @@ -8444,17 +8528,16 @@ public final class WALProtos { if (other.hasStoreFileSizeBytes()) { setStoreFileSizeBytes(other.getStoreFileSizeBytes()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasFamilyName()) { - return false; } if (!hasStoreHomeDir()) { - return false; } return true; @@ -8469,7 +8552,7 @@ public final class WALProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -8479,7 +8562,6 @@ public final class WALProtos { } private int bitField0_; - // required bytes family_name = 1; private com.google.protobuf.ByteString familyName_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes family_name = 1; @@ -8515,42 +8597,44 @@ public final class WALProtos { return this; } - // required string store_home_dir = 2; private java.lang.Object storeHomeDir_ = ""; /** - * required string store_home_dir = 2; - * *
        *relative to region dir
        * 
+ * + * required string store_home_dir = 2; */ public boolean hasStoreHomeDir() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * required string store_home_dir = 2; - * *
        *relative to region dir
        * 
+ * + * required string store_home_dir = 2; */ public java.lang.String getStoreHomeDir() { java.lang.Object ref = storeHomeDir_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - storeHomeDir_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + storeHomeDir_ = s; + } return s; } else { return (java.lang.String) ref; } } /** - * required string store_home_dir = 2; - * *
        *relative to region dir
        * 
+ * + * required string store_home_dir = 2; */ public com.google.protobuf.ByteString getStoreHomeDirBytes() { @@ -8566,11 +8650,11 @@ public final class WALProtos { } } /** - * required string store_home_dir = 2; - * *
        *relative to region dir
        * 
+ * + * required string store_home_dir = 2; */ public Builder setStoreHomeDir( java.lang.String value) { @@ -8583,11 +8667,11 @@ public final class WALProtos { return this; } /** - * required string store_home_dir = 2; - * *
        *relative to region dir
        * 
+ * + * required string store_home_dir = 2; */ public Builder clearStoreHomeDir() { bitField0_ = (bitField0_ & ~0x00000002); @@ -8596,11 +8680,11 @@ public final class WALProtos { return this; } /** - * required string store_home_dir = 2; - * *
        *relative to region dir
        * 
+ * + * required string store_home_dir = 2; */ public Builder setStoreHomeDirBytes( com.google.protobuf.ByteString value) { @@ -8613,7 +8697,6 @@ public final class WALProtos { return this; } - // repeated string store_file = 3; private com.google.protobuf.LazyStringList storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; private void ensureStoreFileIsMutable() { if (!((bitField0_ & 0x00000004) == 0x00000004)) { @@ -8622,53 +8705,53 @@ public final class WALProtos { } } /** - * repeated string store_file = 3; - * *
        * relative to store dir
        * 
+ * + * repeated string store_file = 3; */ - public java.util.List + public com.google.protobuf.ProtocolStringList getStoreFileList() { - return java.util.Collections.unmodifiableList(storeFile_); + return storeFile_.getUnmodifiableView(); } /** - * repeated string store_file = 3; - * *
        * relative to store dir
        * 
+ * + * repeated string store_file = 3; */ public int getStoreFileCount() { return storeFile_.size(); } /** - * repeated string store_file = 3; - * *
        * relative to store dir
        * 
+ * + * repeated string store_file = 3; */ public java.lang.String getStoreFile(int index) { return storeFile_.get(index); } /** - * repeated string store_file = 3; - * *
        * relative to store dir
        * 
+ * + * repeated string store_file = 3; */ public com.google.protobuf.ByteString getStoreFileBytes(int index) { return storeFile_.getByteString(index); } /** - * repeated string store_file = 3; - * *
        * relative to store dir
        * 
+ * + * repeated string store_file = 3; */ public Builder setStoreFile( int index, java.lang.String value) { @@ -8681,11 +8764,11 @@ public final class WALProtos { return this; } /** - * repeated string store_file = 3; - * *
        * relative to store dir
        * 
+ * + * repeated string store_file = 3; */ public Builder addStoreFile( java.lang.String value) { @@ -8698,25 +8781,26 @@ public final class WALProtos { return this; } /** - * repeated string store_file = 3; - * *
        * relative to store dir
        * 
+ * + * repeated string store_file = 3; */ public Builder addAllStoreFile( java.lang.Iterable values) { ensureStoreFileIsMutable(); - super.addAll(values, storeFile_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, storeFile_); onChanged(); return this; } /** - * repeated string store_file = 3; - * *
        * relative to store dir
        * 
+ * + * repeated string store_file = 3; */ public Builder clearStoreFile() { storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; @@ -8725,11 +8809,11 @@ public final class WALProtos { return this; } /** - * repeated string store_file = 3; - * *
        * relative to store dir
        * 
+ * + * repeated string store_file = 3; */ public Builder addStoreFileBytes( com.google.protobuf.ByteString value) { @@ -8742,34 +8826,33 @@ public final class WALProtos { return this; } - // optional uint64 store_file_size_bytes = 4; private long storeFileSizeBytes_ ; /** - * optional uint64 store_file_size_bytes = 4; - * *
        * size of store file
        * 
+ * + * optional uint64 store_file_size_bytes = 4; */ public boolean hasStoreFileSizeBytes() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** - * optional uint64 store_file_size_bytes = 4; - * *
        * size of store file
        * 
+ * + * optional uint64 store_file_size_bytes = 4; */ public long getStoreFileSizeBytes() { return storeFileSizeBytes_; } /** - * optional uint64 store_file_size_bytes = 4; - * *
        * size of store file
        * 
+ * + * optional uint64 store_file_size_bytes = 4; */ public Builder setStoreFileSizeBytes(long value) { bitField0_ |= 0x00000008; @@ -8778,11 +8861,11 @@ public final class WALProtos { return this; } /** - * optional uint64 store_file_size_bytes = 4; - * *
        * size of store file
        * 
+ * + * optional uint64 store_file_size_bytes = 4; */ public Builder clearStoreFileSizeBytes() { bitField0_ = (bitField0_ & ~0x00000008); @@ -8790,22 +8873,59 @@ public final class WALProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.StoreDescriptor) } + // @@protoc_insertion_point(class_scope:hbase.pb.StoreDescriptor) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor DEFAULT_INSTANCE; static { - defaultInstance = new StoreDescriptor(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public StoreDescriptor parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new StoreDescriptor(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.StoreDescriptor) } - public interface BulkLoadDescriptorOrBuilder - extends 
com.google.protobuf.MessageOrBuilder { + public interface BulkLoadDescriptorOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.BulkLoadDescriptor) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.TableName table_name = 1; /** * required .hbase.pb.TableName table_name = 1; */ @@ -8819,7 +8939,6 @@ public final class WALProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); - // required bytes encoded_region_name = 2; /** * required bytes encoded_region_name = 2; */ @@ -8829,7 +8948,6 @@ public final class WALProtos { */ com.google.protobuf.ByteString getEncodedRegionName(); - // repeated .hbase.pb.StoreDescriptor stores = 3; /** * repeated .hbase.pb.StoreDescriptor stores = 3; */ @@ -8854,7 +8972,6 @@ public final class WALProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder( int index); - // required int64 bulkload_seq_num = 4; /** * required int64 bulkload_seq_num = 4; */ @@ -8865,43 +8982,37 @@ public final class WALProtos { long getBulkloadSeqNum(); } /** - * Protobuf type {@code hbase.pb.BulkLoadDescriptor} - * *
    **
    * Special WAL entry used for writing bulk load events to WAL
    * 
+ * + * Protobuf type {@code hbase.pb.BulkLoadDescriptor} */ - public static final class BulkLoadDescriptor extends - com.google.protobuf.GeneratedMessage - implements BulkLoadDescriptorOrBuilder { + public static final class BulkLoadDescriptor extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.BulkLoadDescriptor) + BulkLoadDescriptorOrBuilder { // Use BulkLoadDescriptor.newBuilder() to construct. - private BulkLoadDescriptor(com.google.protobuf.GeneratedMessage.Builder builder) { + private BulkLoadDescriptor(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private BulkLoadDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final BulkLoadDescriptor defaultInstance; - public static BulkLoadDescriptor getDefaultInstance() { - return defaultInstance; } - - public BulkLoadDescriptor getDefaultInstanceForType() { - return defaultInstance; + private BulkLoadDescriptor() { + encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; + stores_ = java.util.Collections.emptyList(); + bulkloadSeqNum_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private BulkLoadDescriptor( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -8943,7 +9054,8 @@ public final class WALProtos { stores_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000004; } - 
stores_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.PARSER, extensionRegistry)); + stores_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.PARSER, extensionRegistry)); break; } case 32: { @@ -8957,7 +9069,7 @@ public final class WALProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { stores_ = java.util.Collections.unmodifiableList(stores_); @@ -8971,30 +9083,14 @@ public final class WALProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_BulkLoadDescriptor_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_BulkLoadDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public BulkLoadDescriptor parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new BulkLoadDescriptor(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.TableName table_name = 1; public static final int TABLE_NAME_FIELD_NUMBER = 1; 
private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** @@ -9007,16 +9103,15 @@ public final class WALProtos { * required .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * required .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } - // required bytes encoded_region_name = 2; public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 2; private com.google.protobuf.ByteString encodedRegionName_; /** @@ -9032,7 +9127,6 @@ public final class WALProtos { return encodedRegionName_; } - // repeated .hbase.pb.StoreDescriptor stores = 3; public static final int STORES_FIELD_NUMBER = 3; private java.util.List stores_; /** @@ -9068,7 +9162,6 @@ public final class WALProtos { return stores_.get(index); } - // required int64 bulkload_seq_num = 4; public static final int BULKLOAD_SEQ_NUM_FIELD_NUMBER = 4; private long bulkloadSeqNum_; /** @@ -9084,16 +9177,11 @@ public final class WALProtos { return bulkloadSeqNum_; } - private void initFields() { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; - stores_ = java.util.Collections.emptyList(); - bulkloadSeqNum_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return 
false; if (!hasTableName()) { memoizedIsInitialized = 0; @@ -9123,9 +9211,8 @@ public final class WALProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, tableName_); + output.writeMessage(1, getTableName()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, encodedRegionName_); @@ -9136,18 +9223,17 @@ public final class WALProtos { if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeInt64(4, bulkloadSeqNum_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, tableName_); + .computeMessageSize(1, getTableName()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream @@ -9161,19 +9247,13 @@ public final class WALProtos { size += com.google.protobuf.CodedOutputStream .computeInt64Size(4, bulkloadSeqNum_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -9201,12 +9281,10 @@ public final class WALProtos { result = result && (getBulkloadSeqNum() == other.getBulkloadSeqNum()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return 
result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -9228,9 +9306,10 @@ public final class WALProtos { } if (hasBulkloadSeqNum()) { hash = (37 * hash) + BULKLOAD_SEQ_NUM_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getBulkloadSeqNum()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getBulkloadSeqNum()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -9258,66 +9337,78 @@ public final class WALProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.BulkLoadDescriptor} - * *
      **
      * Special WAL entry used for writing bulk load events to WAL
      * 
+ * + * Protobuf type {@code hbase.pb.BulkLoadDescriptor} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptorOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.BulkLoadDescriptor) + org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptorOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_BulkLoadDescriptor_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_BulkLoadDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -9330,24 +9421,21 @@ public final class WALProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTableNameFieldBuilder(); getStoresFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; } else { tableNameBuilder_.clear(); } @@ -9365,10 +9453,6 @@ public final class WALProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_BulkLoadDescriptor_descriptor; @@ -9420,6 +9504,32 @@ public final class WALProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor)other); @@ -9456,7 +9566,7 @@ public final class WALProtos { stores_ = other.stores_; bitField0_ = (bitField0_ & ~0x00000004); storesBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getStoresFieldBuilder() : null; } else { storesBuilder_.addAllMessages(other.stores_); @@ -9466,30 +9576,26 @@ public final class WALProtos { if (other.hasBulkloadSeqNum()) { setBulkloadSeqNum(other.getBulkloadSeqNum()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasTableName()) { - return false; } if (!hasEncodedRegionName()) { - return false; } if (!hasBulkloadSeqNum()) { - return false; } if (!getTableName().isInitialized()) { - return false; } for (int i = 0; i < getStoresCount(); i++) { if (!getStores(i).isInitialized()) { - return false; } } @@ -9505,7 +9611,7 @@ public final class WALProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -9515,9 +9621,8 @@ public final class WALProtos { } private int bitField0_; - // required .hbase.pb.TableName table_name = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * required .hbase.pb.TableName table_name = 1; @@ -9530,7 +9635,7 @@ public final class WALProtos { */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } @@ -9571,6 +9676,7 @@ public final class WALProtos { public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); @@ -9589,7 +9695,7 @@ public final class WALProtos { */ public Builder clearTableName() { if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); @@ -9612,19 +9718,20 @@ public final class WALProtos { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { - return tableName_; + return tableName_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * required .hbase.pb.TableName table_name = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { - tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( - tableName_, + getTableName(), getParentForChildren(), isClean()); tableName_ = null; @@ -9632,7 +9739,6 @@ public final class WALProtos { return tableNameBuilder_; } - // required bytes encoded_region_name = 2; private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes encoded_region_name = 2; @@ -9668,7 +9774,6 @@ public final class WALProtos { return this; } - // repeated .hbase.pb.StoreDescriptor stores = 3; private java.util.List stores_ = java.util.Collections.emptyList(); private void ensureStoresIsMutable() { @@ -9678,7 +9783,7 @@ public final class WALProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> storesBuilder_; /** @@ -9810,7 +9915,8 @@ public final class 
WALProtos { java.lang.Iterable values) { if (storesBuilder_ == null) { ensureStoresIsMutable(); - super.addAll(values, stores_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, stores_); onChanged(); } else { storesBuilder_.addAllMessages(values); @@ -9893,11 +9999,11 @@ public final class WALProtos { getStoresBuilderList() { return getStoresFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> getStoresFieldBuilder() { if (storesBuilder_ == null) { - storesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + storesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptorOrBuilder>( stores_, ((bitField0_ & 0x00000004) == 0x00000004), @@ -9908,7 +10014,6 @@ public final class WALProtos { return storesBuilder_; } - // required int64 bulkload_seq_num = 4; private long bulkloadSeqNum_ ; /** * required int64 bulkload_seq_num = 4; @@ -9940,22 +10045,59 @@ public final class WALProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:hbase.pb.BulkLoadDescriptor) + } + + // @@protoc_insertion_point(class_scope:hbase.pb.BulkLoadDescriptor) + private 
static final org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public BulkLoadDescriptor parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new BulkLoadDescriptor(input, extensionRegistry); + } + }; - // @@protoc_insertion_point(builder_scope:hbase.pb.BulkLoadDescriptor) + public static com.google.protobuf.Parser parser() { + return PARSER; } - static { - defaultInstance = new BulkLoadDescriptor(true); - defaultInstance.initFields(); + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.BulkLoadDescriptor) } - public interface RegionEventDescriptorOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RegionEventDescriptorOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RegionEventDescriptor) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionEventDescriptor.EventType event_type = 1; /** * required .hbase.pb.RegionEventDescriptor.EventType event_type = 1; */ @@ -9965,7 +10107,6 @@ public final class WALProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType getEventType(); - // required bytes table_name = 2; /** * required 
bytes table_name = 2; */ @@ -9975,7 +10116,6 @@ public final class WALProtos { */ com.google.protobuf.ByteString getTableName(); - // required bytes encoded_region_name = 3; /** * required bytes encoded_region_name = 3; */ @@ -9985,7 +10125,6 @@ public final class WALProtos { */ com.google.protobuf.ByteString getEncodedRegionName(); - // optional uint64 log_sequence_number = 4; /** * optional uint64 log_sequence_number = 4; */ @@ -9995,7 +10134,6 @@ public final class WALProtos { */ long getLogSequenceNumber(); - // repeated .hbase.pb.StoreDescriptor stores = 5; /** * repeated .hbase.pb.StoreDescriptor stores = 5; */ @@ -10020,88 +10158,83 @@ public final class WALProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder( int index); - // optional .hbase.pb.ServerName server = 6; /** - * optional .hbase.pb.ServerName server = 6; - * *
      * Server who opened the region
      * 
+ * + * optional .hbase.pb.ServerName server = 6; */ boolean hasServer(); /** - * optional .hbase.pb.ServerName server = 6; - * *
      * Server who opened the region
      * 
+ * + * optional .hbase.pb.ServerName server = 6; */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServer(); /** - * optional .hbase.pb.ServerName server = 6; - * *
      * Server who opened the region
      * 
+ * + * optional .hbase.pb.ServerName server = 6; */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder(); - // optional bytes region_name = 7; /** - * optional bytes region_name = 7; - * *
      * full region name
      * 
+ * + * optional bytes region_name = 7; */ boolean hasRegionName(); /** - * optional bytes region_name = 7; - * *
      * full region name
      * 
+ * + * optional bytes region_name = 7; */ com.google.protobuf.ByteString getRegionName(); } /** - * Protobuf type {@code hbase.pb.RegionEventDescriptor} - * *
    **
    * Special WAL entry to hold all related to a region event (open/close).
    * 
+ * + * Protobuf type {@code hbase.pb.RegionEventDescriptor} */ - public static final class RegionEventDescriptor extends - com.google.protobuf.GeneratedMessage - implements RegionEventDescriptorOrBuilder { + public static final class RegionEventDescriptor extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RegionEventDescriptor) + RegionEventDescriptorOrBuilder { // Use RegionEventDescriptor.newBuilder() to construct. - private RegionEventDescriptor(com.google.protobuf.GeneratedMessage.Builder builder) { + private RegionEventDescriptor(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private RegionEventDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RegionEventDescriptor defaultInstance; - public static RegionEventDescriptor getDefaultInstance() { - return defaultInstance; } - - public RegionEventDescriptor getDefaultInstanceForType() { - return defaultInstance; + private RegionEventDescriptor() { + eventType_ = 0; + tableName_ = com.google.protobuf.ByteString.EMPTY; + encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; + logSequenceNumber_ = 0L; + stores_ = java.util.Collections.emptyList(); + regionName_ = com.google.protobuf.ByteString.EMPTY; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RegionEventDescriptor( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -10127,7 
+10260,7 @@ public final class WALProtos { unknownFields.mergeVarintField(1, rawValue); } else { bitField0_ |= 0x00000001; - eventType_ = value; + eventType_ = rawValue; } break; } @@ -10151,7 +10284,8 @@ public final class WALProtos { stores_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000010; } - stores_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.PARSER, extensionRegistry)); + stores_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.PARSER, extensionRegistry)); break; } case 50: { @@ -10178,7 +10312,7 @@ public final class WALProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) { stores_ = java.util.Collections.unmodifiableList(stores_); @@ -10192,28 +10326,13 @@ public final class WALProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_RegionEventDescriptor_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_RegionEventDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RegionEventDescriptor parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return new RegionEventDescriptor(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - /** * Protobuf enum {@code hbase.pb.RegionEventDescriptor.EventType} */ @@ -10222,11 +10341,11 @@ public final class WALProtos { /** * REGION_OPEN = 0; */ - REGION_OPEN(0, 0), + REGION_OPEN(0), /** * REGION_CLOSE = 1; */ - REGION_CLOSE(1, 1), + REGION_CLOSE(1), ; /** @@ -10239,9 +10358,19 @@ public final class WALProtos { public static final int REGION_CLOSE_VALUE = 1; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static EventType valueOf(int value) { + return forNumber(value); + } + + public static EventType forNumber(int value) { switch (value) { case 0: return REGION_OPEN; case 1: return REGION_CLOSE; @@ -10253,17 +10382,17 @@ public final class WALProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + EventType> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public EventType findValueByNumber(int number) { - return EventType.valueOf(number); + return EventType.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -10285,11 +10414,9 @@ public final class WALProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private EventType(int index, int value) { - this.index = index; + private EventType(int value) { this.value = value; } @@ 
-10297,9 +10424,8 @@ public final class WALProtos { } private int bitField0_; - // required .hbase.pb.RegionEventDescriptor.EventType event_type = 1; public static final int EVENT_TYPE_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType eventType_; + private int eventType_; /** * required .hbase.pb.RegionEventDescriptor.EventType event_type = 1; */ @@ -10310,10 +10436,10 @@ public final class WALProtos { * required .hbase.pb.RegionEventDescriptor.EventType event_type = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType getEventType() { - return eventType_; + org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType result = org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.valueOf(eventType_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN : result; } - // required bytes table_name = 2; public static final int TABLE_NAME_FIELD_NUMBER = 2; private com.google.protobuf.ByteString tableName_; /** @@ -10329,7 +10455,6 @@ public final class WALProtos { return tableName_; } - // required bytes encoded_region_name = 3; public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 3; private com.google.protobuf.ByteString encodedRegionName_; /** @@ -10345,7 +10470,6 @@ public final class WALProtos { return encodedRegionName_; } - // optional uint64 log_sequence_number = 4; public static final int LOG_SEQUENCE_NUMBER_FIELD_NUMBER = 4; private long logSequenceNumber_; /** @@ -10361,7 +10485,6 @@ public final class WALProtos { return logSequenceNumber_; } - // repeated .hbase.pb.StoreDescriptor stores = 5; public static final int STORES_FIELD_NUMBER = 5; private java.util.List stores_; /** @@ -10397,77 +10520,67 @@ public final class WALProtos { return stores_.get(index); } - // optional 
.hbase.pb.ServerName server = 6; public static final int SERVER_FIELD_NUMBER = 6; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName server_; /** - * optional .hbase.pb.ServerName server = 6; - * *
      * Server who opened the region
      * 
+ * + * optional .hbase.pb.ServerName server = 6; */ public boolean hasServer() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** - * optional .hbase.pb.ServerName server = 6; - * *
      * Server who opened the region
      * 
+ * + * optional .hbase.pb.ServerName server = 6; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServer() { - return server_; + return server_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } /** - * optional .hbase.pb.ServerName server = 6; - * *
      * Server who opened the region
      * 
+ * + * optional .hbase.pb.ServerName server = 6; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { - return server_; + return server_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } - // optional bytes region_name = 7; public static final int REGION_NAME_FIELD_NUMBER = 7; private com.google.protobuf.ByteString regionName_; /** - * optional bytes region_name = 7; - * *
      * full region name
      * 
+ * + * optional bytes region_name = 7; */ public boolean hasRegionName() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** - * optional bytes region_name = 7; - * *
      * full region name
      * 
+ * + * optional bytes region_name = 7; */ public com.google.protobuf.ByteString getRegionName() { return regionName_; } - private void initFields() { - eventType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN; - tableName_ = com.google.protobuf.ByteString.EMPTY; - encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; - logSequenceNumber_ = 0L; - stores_ = java.util.Collections.emptyList(); - server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - regionName_ = com.google.protobuf.ByteString.EMPTY; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasEventType()) { memoizedIsInitialized = 0; @@ -10499,9 +10612,8 @@ public final class WALProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeEnum(1, eventType_.getNumber()); + output.writeEnum(1, eventType_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, tableName_); @@ -10516,23 +10628,22 @@ public final class WALProtos { output.writeMessage(5, stores_.get(i)); } if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeMessage(6, server_); + output.writeMessage(6, getServer()); } if (((bitField0_ & 0x00000020) == 0x00000020)) { output.writeBytes(7, regionName_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(1, 
eventType_.getNumber()); + .computeEnumSize(1, eventType_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream @@ -10552,25 +10663,19 @@ public final class WALProtos { } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(6, server_); + .computeMessageSize(6, getServer()); } if (((bitField0_ & 0x00000020) == 0x00000020)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(7, regionName_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -10583,8 +10688,7 @@ public final class WALProtos { boolean result = true; result = result && (hasEventType() == other.hasEventType()); if (hasEventType()) { - result = result && - (getEventType() == other.getEventType()); + result = result && eventType_ == other.eventType_; } result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { @@ -10613,12 +10717,10 @@ public final class WALProtos { result = result && getRegionName() .equals(other.getRegionName()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -10628,7 +10730,7 @@ public final class WALProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasEventType()) { hash = (37 * hash) + EVENT_TYPE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getEventType()); + hash = (53 * hash) + 
eventType_; } if (hasTableName()) { hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; @@ -10640,7 +10742,8 @@ public final class WALProtos { } if (hasLogSequenceNumber()) { hash = (37 * hash) + LOG_SEQUENCE_NUMBER_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLogSequenceNumber()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getLogSequenceNumber()); } if (getStoresCount() > 0) { hash = (37 * hash) + STORES_FIELD_NUMBER; @@ -10654,7 +10757,7 @@ public final class WALProtos { hash = (37 * hash) + REGION_NAME_FIELD_NUMBER; hash = (53 * hash) + getRegionName().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -10682,66 +10785,78 @@ public final class WALProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.RegionEventDescriptor} - * *
      **
      * Special WAL entry to hold all related to a region event (open/close).
      * 
+ * + * Protobuf type {@code hbase.pb.RegionEventDescriptor} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptorOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RegionEventDescriptor) + org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptorOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_RegionEventDescriptor_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_RegionEventDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -10754,23 +10869,20 @@ public final class WALProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getStoresFieldBuilder(); getServerFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); - eventType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN; + eventType_ = 0; bitField0_ = (bitField0_ & ~0x00000001); tableName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); @@ -10785,7 +10897,7 @@ public final class WALProtos { 
storesBuilder_.clear(); } if (serverBuilder_ == null) { - server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + server_ = null; } else { serverBuilder_.clear(); } @@ -10795,10 +10907,6 @@ public final class WALProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_RegionEventDescriptor_descriptor; @@ -10862,6 +10970,32 @@ public final class WALProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor)other); @@ -10904,7 +11038,7 @@ public final class WALProtos { stores_ = other.stores_; bitField0_ = (bitField0_ & ~0x00000010); storesBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getStoresFieldBuilder() : null; } else { storesBuilder_.addAllMessages(other.stores_); @@ -10917,32 +11051,28 @@ public final class WALProtos { if (other.hasRegionName()) { setRegionName(other.getRegionName()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasEventType()) { - return false; } if (!hasTableName()) { - return false; } if (!hasEncodedRegionName()) { - return false; } for (int i = 0; i < getStoresCount(); i++) { if (!getStores(i).isInitialized()) { - return false; } } if (hasServer()) { if (!getServer().isInitialized()) { - return false; } } @@ -10958,7 +11088,7 @@ public final class WALProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -10968,8 +11098,7 @@ public final class WALProtos { } private int bitField0_; - // required .hbase.pb.RegionEventDescriptor.EventType event_type = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType eventType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN; + private int eventType_ = 0; /** * required .hbase.pb.RegionEventDescriptor.EventType event_type = 1; */ @@ -10980,7 +11109,8 @@ public final class WALProtos { * required .hbase.pb.RegionEventDescriptor.EventType event_type = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType getEventType() { - return eventType_; + 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType result = org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.valueOf(eventType_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN : result; } /** * required .hbase.pb.RegionEventDescriptor.EventType event_type = 1; @@ -10990,7 +11120,7 @@ public final class WALProtos { throw new NullPointerException(); } bitField0_ |= 0x00000001; - eventType_ = value; + eventType_ = value.getNumber(); onChanged(); return this; } @@ -10999,12 +11129,11 @@ public final class WALProtos { */ public Builder clearEventType() { bitField0_ = (bitField0_ & ~0x00000001); - eventType_ = org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN; + eventType_ = 0; onChanged(); return this; } - // required bytes table_name = 2; private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes table_name = 2; @@ -11040,7 +11169,6 @@ public final class WALProtos { return this; } - // required bytes encoded_region_name = 3; private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; /** * required bytes encoded_region_name = 3; @@ -11076,7 +11204,6 @@ public final class WALProtos { return this; } - // optional uint64 log_sequence_number = 4; private long logSequenceNumber_ ; /** * optional uint64 log_sequence_number = 4; @@ -11109,7 +11236,6 @@ public final class WALProtos { return this; } - // repeated .hbase.pb.StoreDescriptor stores = 5; private java.util.List stores_ = java.util.Collections.emptyList(); private void ensureStoresIsMutable() { @@ -11119,7 +11245,7 @@ public final class WALProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> storesBuilder_; /** @@ -11251,7 +11377,8 @@ public final class WALProtos { java.lang.Iterable values) { if (storesBuilder_ == null) { ensureStoresIsMutable(); - super.addAll(values, stores_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, stores_); onChanged(); } else { storesBuilder_.addAllMessages(values); @@ -11334,11 +11461,11 @@ public final class WALProtos { getStoresBuilderList() { return getStoresFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> getStoresFieldBuilder() { if (storesBuilder_ == null) { - storesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + storesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptorOrBuilder>( stores_, ((bitField0_ & 0x00000010) == 0x00000010), @@ -11349,40 +11476,39 @@ public final class WALProtos { return storesBuilder_; } - // optional .hbase.pb.ServerName server = 6; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName server_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_; /** - * optional .hbase.pb.ServerName server = 6; - * *
        * Server who opened the region
        * 
+ * + * optional .hbase.pb.ServerName server = 6; */ public boolean hasServer() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** - * optional .hbase.pb.ServerName server = 6; - * *
        * Server who opened the region
        * 
+ * + * optional .hbase.pb.ServerName server = 6; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServer() { if (serverBuilder_ == null) { - return server_; + return server_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } else { return serverBuilder_.getMessage(); } } /** - * optional .hbase.pb.ServerName server = 6; - * *
        * Server who opened the region
        * 
+ * + * optional .hbase.pb.ServerName server = 6; */ public Builder setServer(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (serverBuilder_ == null) { @@ -11398,11 +11524,11 @@ public final class WALProtos { return this; } /** - * optional .hbase.pb.ServerName server = 6; - * *
        * Server who opened the region
        * 
+ * + * optional .hbase.pb.ServerName server = 6; */ public Builder setServer( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { @@ -11416,15 +11542,16 @@ public final class WALProtos { return this; } /** - * optional .hbase.pb.ServerName server = 6; - * *
        * Server who opened the region
        * 
+ * + * optional .hbase.pb.ServerName server = 6; */ public Builder mergeServer(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (serverBuilder_ == null) { if (((bitField0_ & 0x00000020) == 0x00000020) && + server_ != null && server_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.newBuilder(server_).mergeFrom(value).buildPartial(); @@ -11439,15 +11566,15 @@ public final class WALProtos { return this; } /** - * optional .hbase.pb.ServerName server = 6; - * *
        * Server who opened the region
        * 
+ * + * optional .hbase.pb.ServerName server = 6; */ public Builder clearServer() { if (serverBuilder_ == null) { - server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + server_ = null; onChanged(); } else { serverBuilder_.clear(); @@ -11456,11 +11583,11 @@ public final class WALProtos { return this; } /** - * optional .hbase.pb.ServerName server = 6; - * *
        * Server who opened the region
        * 
+ * + * optional .hbase.pb.ServerName server = 6; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder getServerBuilder() { bitField0_ |= 0x00000020; @@ -11468,33 +11595,34 @@ public final class WALProtos { return getServerFieldBuilder().getBuilder(); } /** - * optional .hbase.pb.ServerName server = 6; - * *
        * Server who opened the region
        * 
+ * + * optional .hbase.pb.ServerName server = 6; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { if (serverBuilder_ != null) { return serverBuilder_.getMessageOrBuilder(); } else { - return server_; + return server_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } } /** - * optional .hbase.pb.ServerName server = 6; - * *
        * Server who opened the region
        * 
+ * + * optional .hbase.pb.ServerName server = 6; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getServerFieldBuilder() { if (serverBuilder_ == null) { - serverBuilder_ = new com.google.protobuf.SingleFieldBuilder< + serverBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( - server_, + getServer(), getParentForChildren(), isClean()); server_ = null; @@ -11502,34 +11630,33 @@ public final class WALProtos { return serverBuilder_; } - // optional bytes region_name = 7; private com.google.protobuf.ByteString regionName_ = com.google.protobuf.ByteString.EMPTY; /** - * optional bytes region_name = 7; - * *
        * full region name
        * 
+ * + * optional bytes region_name = 7; */ public boolean hasRegionName() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** - * optional bytes region_name = 7; - * *
        * full region name
        * 
+ * + * optional bytes region_name = 7; */ public com.google.protobuf.ByteString getRegionName() { return regionName_; } /** - * optional bytes region_name = 7; - * *
        * full region name
        * 
+ * + * optional bytes region_name = 7; */ public Builder setRegionName(com.google.protobuf.ByteString value) { if (value == null) { @@ -11541,11 +11668,11 @@ public final class WALProtos { return this; } /** - * optional bytes region_name = 7; - * *
        * full region name
        * 
+ * + * optional bytes region_name = 7; */ public Builder clearRegionName() { bitField0_ = (bitField0_ & ~0x00000040); @@ -11553,24 +11680,60 @@ public final class WALProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RegionEventDescriptor) } + // @@protoc_insertion_point(class_scope:hbase.pb.RegionEventDescriptor) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor DEFAULT_INSTANCE; static { - defaultInstance = new RegionEventDescriptor(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RegionEventDescriptor parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionEventDescriptor(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.RegionEventDescriptor) } - public interface 
WALTrailerOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface WALTrailerOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.WALTrailer) + com.google.protobuf.MessageOrBuilder { } /** - * Protobuf type {@code hbase.pb.WALTrailer} - * *
    **
    * A trailer that is appended to the end of a properly closed WAL file.
@@ -11578,37 +11741,30 @@ public final class WALProtos {
    * N.B. This trailer currently doesn't contain any information and we
    * purposefully don't expose it in the WAL APIs. It's for future growth.
    * 
+ * + * Protobuf type {@code hbase.pb.WALTrailer} */ - public static final class WALTrailer extends - com.google.protobuf.GeneratedMessage - implements WALTrailerOrBuilder { + public static final class WALTrailer extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.WALTrailer) + WALTrailerOrBuilder { // Use WALTrailer.newBuilder() to construct. - private WALTrailer(com.google.protobuf.GeneratedMessage.Builder builder) { + private WALTrailer(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private WALTrailer(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final WALTrailer defaultInstance; - public static WALTrailer getDefaultInstance() { - return defaultInstance; } - - public WALTrailer getDefaultInstanceForType() { - return defaultInstance; + private WALTrailer() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private WALTrailer( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -11632,7 +11788,7 @@ public final class WALProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -11643,34 +11799,18 @@ public final class WALProtos { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALTrailer_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALTrailer_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public WALTrailer parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new WALTrailer(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -11678,29 +11818,21 @@ public final class WALProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } 
private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -11711,12 +11843,10 @@ public final class WALProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer other = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer) obj; boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -11724,7 +11854,7 @@ public final class WALProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -11752,52 +11882,61 @@ public final class WALProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return 
com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.WALTrailer} - * *
      **
      * A trailer that is appended to the end of a properly closed WAL file.
@@ -11805,16 +11944,19 @@ public final class WALProtos {
      * N.B. This trailer currently doesn't contain any information and we
      * purposefully don't expose it in the WAL APIs. It's for future growth.
      * 
+ * + * Protobuf type {@code hbase.pb.WALTrailer} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailerOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.WALTrailer) + org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailerOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALTrailer_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALTrailer_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -11827,27 +11969,20 @@ public final class WALProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALTrailer_descriptor; @@ -11871,6 +12006,32 @@ public final class WALProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + 
com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer)other); @@ -11882,7 +12043,8 @@ public final class WALProtos { public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -11899,7 +12061,7 @@ public final class WALProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -11907,74 +12069,111 @@ public final class WALProtos { } return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet 
unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.WALTrailer) } + // @@protoc_insertion_point(class_scope:hbase.pb.WALTrailer) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer DEFAULT_INSTANCE; static { - defaultInstance = new WALTrailer(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public WALTrailer parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new WALTrailer(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.WALTrailer) } - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_WALHeader_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_WALHeader_fieldAccessorTable; - private static 
com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_WALKey_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_WALKey_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FamilyScope_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_FamilyScope_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_CompactionDescriptor_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_CompactionDescriptor_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FlushDescriptor_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_FlushDescriptor_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_fieldAccessorTable; 
- private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_StoreDescriptor_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_StoreDescriptor_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_BulkLoadDescriptor_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_BulkLoadDescriptor_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RegionEventDescriptor_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RegionEventDescriptor_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_WALTrailer_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_WALTrailer_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { @@ -12030,78 +12229,79 @@ public final class WALProtos { "H\001\210\001\000\240\001\001" }; 
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_hbase_pb_WALHeader_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_hbase_pb_WALHeader_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_WALHeader_descriptor, - new java.lang.String[] { "HasCompression", "EncryptionKey", "HasTagCompression", "WriterClsName", "CellCodecClsName", }); - internal_static_hbase_pb_WALKey_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_hbase_pb_WALKey_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_WALKey_descriptor, - new java.lang.String[] { "EncodedRegionName", "TableName", "LogSequenceNumber", "WriteTime", "ClusterId", "Scopes", "FollowingKvCount", "ClusterIds", "NonceGroup", "Nonce", "OrigSequenceNumber", }); - internal_static_hbase_pb_FamilyScope_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_hbase_pb_FamilyScope_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_FamilyScope_descriptor, - new java.lang.String[] { "Family", "ScopeType", }); - internal_static_hbase_pb_CompactionDescriptor_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_hbase_pb_CompactionDescriptor_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_CompactionDescriptor_descriptor, - new java.lang.String[] { "TableName", "EncodedRegionName", "FamilyName", "CompactionInput", "CompactionOutput", "StoreHomeDir", "RegionName", }); - internal_static_hbase_pb_FlushDescriptor_descriptor = - 
getDescriptor().getMessageTypes().get(4); - internal_static_hbase_pb_FlushDescriptor_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_FlushDescriptor_descriptor, - new java.lang.String[] { "Action", "TableName", "EncodedRegionName", "FlushSequenceNumber", "StoreFlushes", "RegionName", }); - internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_descriptor = - internal_static_hbase_pb_FlushDescriptor_descriptor.getNestedTypes().get(0); - internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_descriptor, - new java.lang.String[] { "FamilyName", "StoreHomeDir", "FlushOutput", }); - internal_static_hbase_pb_StoreDescriptor_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_hbase_pb_StoreDescriptor_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_StoreDescriptor_descriptor, - new java.lang.String[] { "FamilyName", "StoreHomeDir", "StoreFile", "StoreFileSizeBytes", }); - internal_static_hbase_pb_BulkLoadDescriptor_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_hbase_pb_BulkLoadDescriptor_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_BulkLoadDescriptor_descriptor, - new java.lang.String[] { "TableName", "EncodedRegionName", "Stores", "BulkloadSeqNum", }); - internal_static_hbase_pb_RegionEventDescriptor_descriptor = - getDescriptor().getMessageTypes().get(7); - internal_static_hbase_pb_RegionEventDescriptor_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_RegionEventDescriptor_descriptor, - new java.lang.String[] { "EventType", "TableName", "EncodedRegionName", "LogSequenceNumber", "Stores", "Server", "RegionName", 
}); - internal_static_hbase_pb_WALTrailer_descriptor = - getDescriptor().getMessageTypes().get(8); - internal_static_hbase_pb_WALTrailer_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_WALTrailer_descriptor, - new java.lang.String[] { }); - return null; - } - }; + new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(), }, assigner); + internal_static_hbase_pb_WALHeader_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_hbase_pb_WALHeader_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_WALHeader_descriptor, + new java.lang.String[] { "HasCompression", "EncryptionKey", "HasTagCompression", "WriterClsName", "CellCodecClsName", }); + internal_static_hbase_pb_WALKey_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_hbase_pb_WALKey_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_WALKey_descriptor, + new java.lang.String[] { "EncodedRegionName", "TableName", "LogSequenceNumber", "WriteTime", "ClusterId", "Scopes", "FollowingKvCount", "ClusterIds", "NonceGroup", "Nonce", "OrigSequenceNumber", }); + internal_static_hbase_pb_FamilyScope_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_hbase_pb_FamilyScope_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_FamilyScope_descriptor, + new java.lang.String[] { "Family", "ScopeType", }); + 
internal_static_hbase_pb_CompactionDescriptor_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_hbase_pb_CompactionDescriptor_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_CompactionDescriptor_descriptor, + new java.lang.String[] { "TableName", "EncodedRegionName", "FamilyName", "CompactionInput", "CompactionOutput", "StoreHomeDir", "RegionName", }); + internal_static_hbase_pb_FlushDescriptor_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_hbase_pb_FlushDescriptor_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_FlushDescriptor_descriptor, + new java.lang.String[] { "Action", "TableName", "EncodedRegionName", "FlushSequenceNumber", "StoreFlushes", "RegionName", }); + internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_descriptor = + internal_static_hbase_pb_FlushDescriptor_descriptor.getNestedTypes().get(0); + internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_descriptor, + new java.lang.String[] { "FamilyName", "StoreHomeDir", "FlushOutput", }); + internal_static_hbase_pb_StoreDescriptor_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_hbase_pb_StoreDescriptor_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_StoreDescriptor_descriptor, + new java.lang.String[] { "FamilyName", "StoreHomeDir", "StoreFile", "StoreFileSizeBytes", }); + internal_static_hbase_pb_BulkLoadDescriptor_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_hbase_pb_BulkLoadDescriptor_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_BulkLoadDescriptor_descriptor, + new 
java.lang.String[] { "TableName", "EncodedRegionName", "Stores", "BulkloadSeqNum", }); + internal_static_hbase_pb_RegionEventDescriptor_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_hbase_pb_RegionEventDescriptor_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_RegionEventDescriptor_descriptor, + new java.lang.String[] { "EventType", "TableName", "EncodedRegionName", "LogSequenceNumber", "Stores", "Server", "RegionName", }); + internal_static_hbase_pb_WALTrailer_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_hbase_pb_WALTrailer_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_WALTrailer_descriptor, + new java.lang.String[] { }); + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ZooKeeperProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ZooKeeperProtos.java index 9e2bd9c..d7b5221 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ZooKeeperProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ZooKeeperProtos.java @@ -6,118 +6,115 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated; public final class ZooKeeperProtos { private ZooKeeperProtos() {} public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); } - public interface MetaRegionServerOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface 
MetaRegionServerOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.MetaRegionServer) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.ServerName server = 1; /** - * required .hbase.pb.ServerName server = 1; - * *
      * The ServerName hosting the meta region currently, or destination server,
      * if meta region is in transition.
      * 
+ * + * required .hbase.pb.ServerName server = 1; */ boolean hasServer(); /** - * required .hbase.pb.ServerName server = 1; - * *
      * The ServerName hosting the meta region currently, or destination server,
      * if meta region is in transition.
      * 
+ * + * required .hbase.pb.ServerName server = 1; */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServer(); /** - * required .hbase.pb.ServerName server = 1; - * *
      * The ServerName hosting the meta region currently, or destination server,
      * if meta region is in transition.
      * 
+ * + * required .hbase.pb.ServerName server = 1; */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder(); - // optional uint32 rpc_version = 2; /** - * optional uint32 rpc_version = 2; - * *
      * The major version of the rpc the server speaks.  This is used so that
      * clients connecting to the cluster can have prior knowledge of what version
      * to send to a RegionServer.  AsyncHBase will use this to detect versions.
      * 
+ * + * optional uint32 rpc_version = 2; */ boolean hasRpcVersion(); /** - * optional uint32 rpc_version = 2; - * *
      * The major version of the rpc the server speaks.  This is used so that
      * clients connecting to the cluster can have prior knowledge of what version
      * to send to a RegionServer.  AsyncHBase will use this to detect versions.
      * 
+ * + * optional uint32 rpc_version = 2; */ int getRpcVersion(); - // optional .hbase.pb.RegionState.State state = 3; /** - * optional .hbase.pb.RegionState.State state = 3; - * *
      * State of the region transition. OPEN means fully operational 'hbase:meta'
      * 
+ * + * optional .hbase.pb.RegionState.State state = 3; */ boolean hasState(); /** - * optional .hbase.pb.RegionState.State state = 3; - * *
      * State of the region transition. OPEN means fully operational 'hbase:meta'
      * 
+ * + * optional .hbase.pb.RegionState.State state = 3; */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State getState(); } /** - * Protobuf type {@code hbase.pb.MetaRegionServer} - * *
    **
    * Content of the meta-region-server znode.
    * 
+ * + * Protobuf type {@code hbase.pb.MetaRegionServer} */ - public static final class MetaRegionServer extends - com.google.protobuf.GeneratedMessage - implements MetaRegionServerOrBuilder { + public static final class MetaRegionServer extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.MetaRegionServer) + MetaRegionServerOrBuilder { // Use MetaRegionServer.newBuilder() to construct. - private MetaRegionServer(com.google.protobuf.GeneratedMessage.Builder builder) { + private MetaRegionServer(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private MetaRegionServer(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final MetaRegionServer defaultInstance; - public static MetaRegionServer getDefaultInstance() { - return defaultInstance; - } - - public MetaRegionServer getDefaultInstanceForType() { - return defaultInstance; + private MetaRegionServer() { + rpcVersion_ = 0; + state_ = 0; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private MetaRegionServer( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -161,7 +158,7 @@ public final class ZooKeeperProtos { unknownFields.mergeVarintField(3, rawValue); } else { bitField0_ |= 0x00000004; - state_ = value; + state_ = rawValue; } break; } @@ -171,7 +168,7 @@ public final class ZooKeeperProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw 
new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -182,127 +179,106 @@ public final class ZooKeeperProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_MetaRegionServer_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_MetaRegionServer_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public MetaRegionServer parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new MetaRegionServer(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.ServerName server = 1; public static final int SERVER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName server_; /** - * required .hbase.pb.ServerName server = 1; - * *
      * The ServerName hosting the meta region currently, or destination server,
      * if meta region is in transition.
      * 
+ * + * required .hbase.pb.ServerName server = 1; */ public boolean hasServer() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required .hbase.pb.ServerName server = 1; - * *
      * The ServerName hosting the meta region currently, or destination server,
      * if meta region is in transition.
      * 
+ * + * required .hbase.pb.ServerName server = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServer() { - return server_; + return server_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } /** - * required .hbase.pb.ServerName server = 1; - * *
      * The ServerName hosting the meta region currently, or destination server,
      * if meta region is in transition.
      * 
+ * + * required .hbase.pb.ServerName server = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { - return server_; + return server_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } - // optional uint32 rpc_version = 2; public static final int RPC_VERSION_FIELD_NUMBER = 2; private int rpcVersion_; /** - * optional uint32 rpc_version = 2; - * *
      * The major version of the rpc the server speaks.  This is used so that
      * clients connecting to the cluster can have prior knowledge of what version
      * to send to a RegionServer.  AsyncHBase will use this to detect versions.
      * 
+ * + * optional uint32 rpc_version = 2; */ public boolean hasRpcVersion() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional uint32 rpc_version = 2; - * *
      * The major version of the rpc the server speaks.  This is used so that
      * clients connecting to the cluster can have prior knowledge of what version
      * to send to a RegionServer.  AsyncHBase will use this to detect versions.
      * 
+ * + * optional uint32 rpc_version = 2; */ public int getRpcVersion() { return rpcVersion_; } - // optional .hbase.pb.RegionState.State state = 3; public static final int STATE_FIELD_NUMBER = 3; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State state_; + private int state_; /** - * optional .hbase.pb.RegionState.State state = 3; - * *
      * State of the region transition. OPEN means fully operational 'hbase:meta'
      * 
+ * + * optional .hbase.pb.RegionState.State state = 3; */ public boolean hasState() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * optional .hbase.pb.RegionState.State state = 3; - * *
      * State of the region transition. OPEN means fully operational 'hbase:meta'
      * 
+ * + * optional .hbase.pb.RegionState.State state = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State getState() { - return state_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State result = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.valueOf(state_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE : result; } - private void initFields() { - server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - rpcVersion_ = 0; - state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasServer()) { memoizedIsInitialized = 0; @@ -318,28 +294,26 @@ public final class ZooKeeperProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, server_); + output.writeMessage(1, getServer()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt32(2, rpcVersion_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeEnum(3, state_.getNumber()); + output.writeEnum(3, state_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, server_); + 
.computeMessageSize(1, getServer()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream @@ -347,21 +321,15 @@ public final class ZooKeeperProtos { } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(3, state_.getNumber()); + .computeEnumSize(3, state_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -384,15 +352,12 @@ public final class ZooKeeperProtos { } result = result && (hasState() == other.hasState()); if (hasState()) { - result = result && - (getState() == other.getState()); + result = result && state_ == other.state_; } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -410,9 +375,9 @@ public final class ZooKeeperProtos { } if (hasState()) { hash = (37 * hash) + STATE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getState()); + hash = (53 * hash) + state_; } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -440,66 +405,78 @@ public final class ZooKeeperProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public 
static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.MetaRegionServer} - * *
      **
      * Content of the meta-region-server znode.
      * 
+ * + * Protobuf type {@code hbase.pb.MetaRegionServer} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServerOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.MetaRegionServer) + org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServerOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_MetaRegionServer_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_MetaRegionServer_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -512,38 +489,31 @@ public final class ZooKeeperProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getServerFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (serverBuilder_ == null) { - server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + server_ = null; } else { serverBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); rpcVersion_ = 0; bitField0_ = (bitField0_ & ~0x00000002); - state_ = 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE; + state_ = 0; bitField0_ = (bitField0_ & ~0x00000004); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_MetaRegionServer_descriptor; @@ -586,6 +556,32 @@ public final class ZooKeeperProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer)other); @@ -606,17 +602,16 @@ public final class ZooKeeperProtos { if (other.hasState()) { setState(other.getState()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasServer()) { - return false; } if 
(!getServer().isInitialized()) { - return false; } return true; @@ -631,7 +626,7 @@ public final class ZooKeeperProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -641,43 +636,42 @@ public final class ZooKeeperProtos { } private int bitField0_; - // required .hbase.pb.ServerName server = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName server_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_; /** - * required .hbase.pb.ServerName server = 1; - * *
        * The ServerName hosting the meta region currently, or destination server,
        * if meta region is in transition.
        * 
+ * + * required .hbase.pb.ServerName server = 1; */ public boolean hasServer() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required .hbase.pb.ServerName server = 1; - * *
        * The ServerName hosting the meta region currently, or destination server,
        * if meta region is in transition.
        * 
+ * + * required .hbase.pb.ServerName server = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServer() { if (serverBuilder_ == null) { - return server_; + return server_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } else { return serverBuilder_.getMessage(); } } /** - * required .hbase.pb.ServerName server = 1; - * *
        * The ServerName hosting the meta region currently, or destination server,
        * if meta region is in transition.
        * 
+ * + * required .hbase.pb.ServerName server = 1; */ public Builder setServer(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (serverBuilder_ == null) { @@ -693,12 +687,12 @@ public final class ZooKeeperProtos { return this; } /** - * required .hbase.pb.ServerName server = 1; - * *
        * The ServerName hosting the meta region currently, or destination server,
        * if meta region is in transition.
        * 
+ * + * required .hbase.pb.ServerName server = 1; */ public Builder setServer( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { @@ -712,16 +706,17 @@ public final class ZooKeeperProtos { return this; } /** - * required .hbase.pb.ServerName server = 1; - * *
        * The ServerName hosting the meta region currently, or destination server,
        * if meta region is in transition.
        * 
+ * + * required .hbase.pb.ServerName server = 1; */ public Builder mergeServer(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (serverBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + server_ != null && server_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.newBuilder(server_).mergeFrom(value).buildPartial(); @@ -736,16 +731,16 @@ public final class ZooKeeperProtos { return this; } /** - * required .hbase.pb.ServerName server = 1; - * *
        * The ServerName hosting the meta region currently, or destination server,
        * if meta region is in transition.
        * 
+ * + * required .hbase.pb.ServerName server = 1; */ public Builder clearServer() { if (serverBuilder_ == null) { - server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + server_ = null; onChanged(); } else { serverBuilder_.clear(); @@ -754,12 +749,12 @@ public final class ZooKeeperProtos { return this; } /** - * required .hbase.pb.ServerName server = 1; - * *
        * The ServerName hosting the meta region currently, or destination server,
        * if meta region is in transition.
        * 
+ * + * required .hbase.pb.ServerName server = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder getServerBuilder() { bitField0_ |= 0x00000001; @@ -767,35 +762,36 @@ public final class ZooKeeperProtos { return getServerFieldBuilder().getBuilder(); } /** - * required .hbase.pb.ServerName server = 1; - * *
        * The ServerName hosting the meta region currently, or destination server,
        * if meta region is in transition.
        * 
+ * + * required .hbase.pb.ServerName server = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { if (serverBuilder_ != null) { return serverBuilder_.getMessageOrBuilder(); } else { - return server_; + return server_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } } /** - * required .hbase.pb.ServerName server = 1; - * *
        * The ServerName hosting the meta region currently, or destination server,
        * if meta region is in transition.
        * 
+ * + * required .hbase.pb.ServerName server = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getServerFieldBuilder() { if (serverBuilder_ == null) { - serverBuilder_ = new com.google.protobuf.SingleFieldBuilder< + serverBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( - server_, + getServer(), getParentForChildren(), isClean()); server_ = null; @@ -803,40 +799,39 @@ public final class ZooKeeperProtos { return serverBuilder_; } - // optional uint32 rpc_version = 2; private int rpcVersion_ ; /** - * optional uint32 rpc_version = 2; - * *
        * The major version of the rpc the server speaks.  This is used so that
        * clients connecting to the cluster can have prior knowledge of what version
        * to send to a RegionServer.  AsyncHBase will use this to detect versions.
        * 
+ * + * optional uint32 rpc_version = 2; */ public boolean hasRpcVersion() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional uint32 rpc_version = 2; - * *
        * The major version of the rpc the server speaks.  This is used so that
        * clients connecting to the cluster can have prior knowledge of what version
        * to send to a RegionServer.  AsyncHBase will use this to detect versions.
        * 
+ * + * optional uint32 rpc_version = 2; */ public int getRpcVersion() { return rpcVersion_; } /** - * optional uint32 rpc_version = 2; - * *
        * The major version of the rpc the server speaks.  This is used so that
        * clients connecting to the cluster can have prior knowledge of what version
        * to send to a RegionServer.  AsyncHBase will use this to detect versions.
        * 
+ * + * optional uint32 rpc_version = 2; */ public Builder setRpcVersion(int value) { bitField0_ |= 0x00000002; @@ -845,13 +840,13 @@ public final class ZooKeeperProtos { return this; } /** - * optional uint32 rpc_version = 2; - * *
        * The major version of the rpc the server speaks.  This is used so that
        * clients connecting to the cluster can have prior knowledge of what version
        * to send to a RegionServer.  AsyncHBase will use this to detect versions.
        * 
+ * + * optional uint32 rpc_version = 2; */ public Builder clearRpcVersion() { bitField0_ = (bitField0_ & ~0x00000002); @@ -860,117 +855,152 @@ public final class ZooKeeperProtos { return this; } - // optional .hbase.pb.RegionState.State state = 3; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE; + private int state_ = 0; /** - * optional .hbase.pb.RegionState.State state = 3; - * *
        * State of the region transition. OPEN means fully operational 'hbase:meta'
        * 
+ * + * optional .hbase.pb.RegionState.State state = 3; */ public boolean hasState() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * optional .hbase.pb.RegionState.State state = 3; - * *
        * State of the region transition. OPEN means fully operational 'hbase:meta'
        * 
+ * + * optional .hbase.pb.RegionState.State state = 3; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State getState() { - return state_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State result = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.valueOf(state_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE : result; } /** - * optional .hbase.pb.RegionState.State state = 3; - * *
        * State of the region transition. OPEN means fully operational 'hbase:meta'
        * 
+ * + * optional .hbase.pb.RegionState.State state = 3; */ public Builder setState(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; - state_ = value; + state_ = value.getNumber(); onChanged(); return this; } /** - * optional .hbase.pb.RegionState.State state = 3; - * *
        * State of the region transition. OPEN means fully operational 'hbase:meta'
        * 
+ * + * optional .hbase.pb.RegionState.State state = 3; */ public Builder clearState() { bitField0_ = (bitField0_ & ~0x00000004); - state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE; + state_ = 0; onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.MetaRegionServer) } + // @@protoc_insertion_point(class_scope:hbase.pb.MetaRegionServer) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer DEFAULT_INSTANCE; static { - defaultInstance = new MetaRegionServer(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public MetaRegionServer parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MetaRegionServer(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // 
@@protoc_insertion_point(class_scope:hbase.pb.MetaRegionServer) } - public interface MasterOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface MasterOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.Master) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.ServerName master = 1; /** - * required .hbase.pb.ServerName master = 1; - * *
      * The ServerName of the current Master
      * 
+ * + * required .hbase.pb.ServerName master = 1; */ boolean hasMaster(); /** - * required .hbase.pb.ServerName master = 1; - * *
      * The ServerName of the current Master
      * 
+ * + * required .hbase.pb.ServerName master = 1; */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getMaster(); /** - * required .hbase.pb.ServerName master = 1; - * *
      * The ServerName of the current Master
      * 
+ * + * required .hbase.pb.ServerName master = 1; */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder(); - // optional uint32 rpc_version = 2; /** - * optional uint32 rpc_version = 2; - * *
      * Major RPC version so that clients can know what version the master can accept.
      * 
+ * + * optional uint32 rpc_version = 2; */ boolean hasRpcVersion(); /** - * optional uint32 rpc_version = 2; - * *
      * Major RPC version so that clients can know what version the master can accept.
      * 
+ * + * optional uint32 rpc_version = 2; */ int getRpcVersion(); - // optional uint32 info_port = 3; /** * optional uint32 info_port = 3; */ @@ -981,43 +1011,36 @@ public final class ZooKeeperProtos { int getInfoPort(); } /** - * Protobuf type {@code hbase.pb.Master} - * *
    **
    * Content of the master znode.
    * 
+ * + * Protobuf type {@code hbase.pb.Master} */ - public static final class Master extends - com.google.protobuf.GeneratedMessage - implements MasterOrBuilder { + public static final class Master extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.Master) + MasterOrBuilder { // Use Master.newBuilder() to construct. - private Master(com.google.protobuf.GeneratedMessage.Builder builder) { + private Master(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private Master(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final Master defaultInstance; - public static Master getDefaultInstance() { - return defaultInstance; - } - - public Master getDefaultInstanceForType() { - return defaultInstance; + private Master() { + rpcVersion_ = 0; + infoPort_ = 0; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private Master( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -1065,7 +1088,7 @@ public final class ZooKeeperProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -1076,88 +1099,70 @@ public final class ZooKeeperProtos { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_Master_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_Master_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public Master parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new Master(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.ServerName master = 1; public static final int MASTER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName master_; /** - * required .hbase.pb.ServerName master = 1; - * *
      * The ServerName of the current Master
      * 
+ * + * required .hbase.pb.ServerName master = 1; */ public boolean hasMaster() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required .hbase.pb.ServerName master = 1; - * *
      * The ServerName of the current Master
      * 
+ * + * required .hbase.pb.ServerName master = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getMaster() { - return master_; + return master_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : master_; } /** - * required .hbase.pb.ServerName master = 1; - * *
      * The ServerName of the current Master
      * 
+ * + * required .hbase.pb.ServerName master = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder() { - return master_; + return master_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : master_; } - // optional uint32 rpc_version = 2; public static final int RPC_VERSION_FIELD_NUMBER = 2; private int rpcVersion_; /** - * optional uint32 rpc_version = 2; - * *
      * Major RPC version so that clients can know what version the master can accept.
      * 
+ * + * optional uint32 rpc_version = 2; */ public boolean hasRpcVersion() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional uint32 rpc_version = 2; - * *
      * Major RPC version so that clients can know what version the master can accept.
      * 
+ * + * optional uint32 rpc_version = 2; */ public int getRpcVersion() { return rpcVersion_; } - // optional uint32 info_port = 3; public static final int INFO_PORT_FIELD_NUMBER = 3; private int infoPort_; /** @@ -1173,15 +1178,11 @@ public final class ZooKeeperProtos { return infoPort_; } - private void initFields() { - master_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - rpcVersion_ = 0; - infoPort_ = 0; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasMaster()) { memoizedIsInitialized = 0; @@ -1197,9 +1198,8 @@ public final class ZooKeeperProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, master_); + output.writeMessage(1, getMaster()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt32(2, rpcVersion_); @@ -1207,18 +1207,17 @@ public final class ZooKeeperProtos { if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeUInt32(3, infoPort_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, master_); + .computeMessageSize(1, getMaster()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream @@ -1228,19 +1227,13 @@ public final class ZooKeeperProtos { size += com.google.protobuf.CodedOutputStream .computeUInt32Size(3, infoPort_); } - size += 
getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -1266,12 +1259,10 @@ public final class ZooKeeperProtos { result = result && (getInfoPort() == other.getInfoPort()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -1291,7 +1282,7 @@ public final class ZooKeeperProtos { hash = (37 * hash) + INFO_PORT_FIELD_NUMBER; hash = (53 * hash) + getInfoPort(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -1319,66 +1310,78 @@ public final class ZooKeeperProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master parseDelimitedFrom(java.io.InputStream input) throws 
java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.Master} - * *
      **
      * Content of the master znode.
      * 
+ * + * Protobuf type {@code hbase.pb.Master} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MasterOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.Master) + org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MasterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_Master_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_Master_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -1391,23 +1394,20 @@ public final class ZooKeeperProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getMasterFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (masterBuilder_ == null) { - master_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + master_ = null; } else { masterBuilder_.clear(); } @@ -1419,10 +1419,6 @@ public final class ZooKeeperProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_Master_descriptor; @@ -1465,6 +1461,32 @@ public final class ZooKeeperProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master)other); @@ -1485,17 +1507,16 @@ public final class ZooKeeperProtos { if (other.hasInfoPort()) { setInfoPort(other.getInfoPort()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasMaster()) { - return false; } if (!getMaster().isInitialized()) { - return false; } return true; @@ -1510,7 +1531,7 @@ public final class ZooKeeperProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master) 
e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -1520,40 +1541,39 @@ public final class ZooKeeperProtos { } private int bitField0_; - // required .hbase.pb.ServerName master = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName master_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName master_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> masterBuilder_; /** - * required .hbase.pb.ServerName master = 1; - * *
        * The ServerName of the current Master
        * 
+ * + * required .hbase.pb.ServerName master = 1; */ public boolean hasMaster() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required .hbase.pb.ServerName master = 1; - * *
        * The ServerName of the current Master
        * 
+ * + * required .hbase.pb.ServerName master = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getMaster() { if (masterBuilder_ == null) { - return master_; + return master_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : master_; } else { return masterBuilder_.getMessage(); } } /** - * required .hbase.pb.ServerName master = 1; - * *
        * The ServerName of the current Master
        * 
+ * + * required .hbase.pb.ServerName master = 1; */ public Builder setMaster(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (masterBuilder_ == null) { @@ -1569,11 +1589,11 @@ public final class ZooKeeperProtos { return this; } /** - * required .hbase.pb.ServerName master = 1; - * *
        * The ServerName of the current Master
        * 
+ * + * required .hbase.pb.ServerName master = 1; */ public Builder setMaster( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { @@ -1587,15 +1607,16 @@ public final class ZooKeeperProtos { return this; } /** - * required .hbase.pb.ServerName master = 1; - * *
        * The ServerName of the current Master
        * 
+ * + * required .hbase.pb.ServerName master = 1; */ public Builder mergeMaster(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (masterBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + master_ != null && master_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { master_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.newBuilder(master_).mergeFrom(value).buildPartial(); @@ -1610,15 +1631,15 @@ public final class ZooKeeperProtos { return this; } /** - * required .hbase.pb.ServerName master = 1; - * *
        * The ServerName of the current Master
        * 
+ * + * required .hbase.pb.ServerName master = 1; */ public Builder clearMaster() { if (masterBuilder_ == null) { - master_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + master_ = null; onChanged(); } else { masterBuilder_.clear(); @@ -1627,11 +1648,11 @@ public final class ZooKeeperProtos { return this; } /** - * required .hbase.pb.ServerName master = 1; - * *
        * The ServerName of the current Master
        * 
+ * + * required .hbase.pb.ServerName master = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder getMasterBuilder() { bitField0_ |= 0x00000001; @@ -1639,33 +1660,34 @@ public final class ZooKeeperProtos { return getMasterFieldBuilder().getBuilder(); } /** - * required .hbase.pb.ServerName master = 1; - * *
        * The ServerName of the current Master
        * 
+ * + * required .hbase.pb.ServerName master = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder() { if (masterBuilder_ != null) { return masterBuilder_.getMessageOrBuilder(); } else { - return master_; + return master_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : master_; } } /** - * required .hbase.pb.ServerName master = 1; - * *
        * The ServerName of the current Master
        * 
+ * + * required .hbase.pb.ServerName master = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getMasterFieldBuilder() { if (masterBuilder_ == null) { - masterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + masterBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( - master_, + getMaster(), getParentForChildren(), isClean()); master_ = null; @@ -1673,34 +1695,33 @@ public final class ZooKeeperProtos { return masterBuilder_; } - // optional uint32 rpc_version = 2; private int rpcVersion_ ; /** - * optional uint32 rpc_version = 2; - * *
        * Major RPC version so that clients can know what version the master can accept.
        * 
+ * + * optional uint32 rpc_version = 2; */ public boolean hasRpcVersion() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional uint32 rpc_version = 2; - * *
        * Major RPC version so that clients can know what version the master can accept.
        * 
+ * + * optional uint32 rpc_version = 2; */ public int getRpcVersion() { return rpcVersion_; } /** - * optional uint32 rpc_version = 2; - * *
        * Major RPC version so that clients can know what version the master can accept.
        * 
+ * + * optional uint32 rpc_version = 2; */ public Builder setRpcVersion(int value) { bitField0_ |= 0x00000002; @@ -1709,11 +1730,11 @@ public final class ZooKeeperProtos { return this; } /** - * optional uint32 rpc_version = 2; - * *
        * Major RPC version so that clients can know what version the master can accept.
        * 
+ * + * optional uint32 rpc_version = 2; */ public Builder clearRpcVersion() { bitField0_ = (bitField0_ & ~0x00000002); @@ -1722,7 +1743,6 @@ public final class ZooKeeperProtos { return this; } - // optional uint32 info_port = 3; private int infoPort_ ; /** * optional uint32 info_port = 3; @@ -1754,89 +1774,118 @@ public final class ZooKeeperProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.Master) } + // @@protoc_insertion_point(class_scope:hbase.pb.Master) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master DEFAULT_INSTANCE; static { - defaultInstance = new Master(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public Master parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Master(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // 
@@protoc_insertion_point(class_scope:hbase.pb.Master) } - public interface ClusterUpOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ClusterUpOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ClusterUp) + com.google.protobuf.MessageOrBuilder { - // required string start_date = 1; /** - * required string start_date = 1; - * *
      * If this znode is present, cluster is up.  Currently
      * the data is cluster start_date.
      * 
+ * + * required string start_date = 1; */ boolean hasStartDate(); /** - * required string start_date = 1; - * *
      * If this znode is present, cluster is up.  Currently
      * the data is cluster start_date.
      * 
+ * + * required string start_date = 1; */ java.lang.String getStartDate(); /** - * required string start_date = 1; - * *
      * If this znode is present, cluster is up.  Currently
      * the data is cluster start_date.
      * 
+ * + * required string start_date = 1; */ com.google.protobuf.ByteString getStartDateBytes(); } /** - * Protobuf type {@code hbase.pb.ClusterUp} - * *
    **
    * Content of the '/hbase/running', cluster state, znode.
    * 
+ * + * Protobuf type {@code hbase.pb.ClusterUp} */ - public static final class ClusterUp extends - com.google.protobuf.GeneratedMessage - implements ClusterUpOrBuilder { + public static final class ClusterUp extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ClusterUp) + ClusterUpOrBuilder { // Use ClusterUp.newBuilder() to construct. - private ClusterUp(com.google.protobuf.GeneratedMessage.Builder builder) { + private ClusterUp(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ClusterUp(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ClusterUp defaultInstance; - public static ClusterUp getDefaultInstance() { - return defaultInstance; } - - public ClusterUp getDefaultInstanceForType() { - return defaultInstance; + private ClusterUp() { + startDate_ = ""; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ClusterUp( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -1856,8 +1905,9 @@ public final class ZooKeeperProtos { break; } case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; - startDate_ = input.readBytes(); + startDate_ = bs; break; } } @@ -1866,7 +1916,7 @@ public final class ZooKeeperProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - 
e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -1877,50 +1927,34 @@ public final class ZooKeeperProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_ClusterUp_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_ClusterUp_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ClusterUp parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ClusterUp(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string start_date = 1; public static final int START_DATE_FIELD_NUMBER = 1; - private java.lang.Object startDate_; + private volatile java.lang.Object startDate_; /** - * required string start_date = 1; - * *
      * If this znode is present, cluster is up.  Currently
      * the data is cluster start_date.
      * 
+ * + * required string start_date = 1; */ public boolean hasStartDate() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required string start_date = 1; - * *
      * If this znode is present, cluster is up.  Currently
      * the data is cluster start_date.
      * 
+ * + * required string start_date = 1; */ public java.lang.String getStartDate() { java.lang.Object ref = startDate_; @@ -1937,12 +1971,12 @@ public final class ZooKeeperProtos { } } /** - * required string start_date = 1; - * *
      * If this znode is present, cluster is up.  Currently
      * the data is cluster start_date.
      * 
+ * + * required string start_date = 1; */ public com.google.protobuf.ByteString getStartDateBytes() { @@ -1958,13 +1992,11 @@ public final class ZooKeeperProtos { } } - private void initFields() { - startDate_ = ""; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasStartDate()) { memoizedIsInitialized = 0; @@ -1976,36 +2008,27 @@ public final class ZooKeeperProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getStartDateBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, startDate_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getStartDateBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, startDate_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -2021,12 +2044,10 @@ public final class ZooKeeperProtos { result = result && getStartDate() .equals(other.getStartDate()); } - result = result && - 
getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -2038,7 +2059,7 @@ public final class ZooKeeperProtos { hash = (37 * hash) + START_DATE_FIELD_NUMBER; hash = (53 * hash) + getStartDate().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -2066,66 +2087,78 @@ public final class ZooKeeperProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.ClusterUp} - * *
      **
      * Content of the '/hbase/running', cluster state, znode.
      * 
+ * + * Protobuf type {@code hbase.pb.ClusterUp} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUpOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ClusterUp) + org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUpOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_ClusterUp_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_ClusterUp_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -2138,18 +2171,15 @@ public final class ZooKeeperProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); startDate_ = ""; @@ -2157,10 +2187,6 @@ public final class ZooKeeperProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_ClusterUp_descriptor; @@ -2191,6 +2217,32 @@ public final class ZooKeeperProtos { return result; } 
+ public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp)other); @@ -2207,13 +2259,13 @@ public final class ZooKeeperProtos { startDate_ = other.startDate_; onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasStartDate()) { - return false; } return true; @@ -2228,7 +2280,7 @@ public final class ZooKeeperProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -2238,45 +2290,47 @@ public final class ZooKeeperProtos { } private int bitField0_; - // required string start_date = 1; private 
java.lang.Object startDate_ = ""; /** - * required string start_date = 1; - * *
        * If this znode is present, cluster is up.  Currently
        * the data is cluster start_date.
        * 
+ * + * required string start_date = 1; */ public boolean hasStartDate() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required string start_date = 1; - * *
        * If this znode is present, cluster is up.  Currently
        * the data is cluster start_date.
        * 
+ * + * required string start_date = 1; */ public java.lang.String getStartDate() { java.lang.Object ref = startDate_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - startDate_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + startDate_ = s; + } return s; } else { return (java.lang.String) ref; } } /** - * required string start_date = 1; - * *
        * If this znode is present, cluster is up.  Currently
        * the data is cluster start_date.
        * 
+ * + * required string start_date = 1; */ public com.google.protobuf.ByteString getStartDateBytes() { @@ -2292,12 +2346,12 @@ public final class ZooKeeperProtos { } } /** - * required string start_date = 1; - * *
        * If this znode is present, cluster is up.  Currently
        * the data is cluster start_date.
        * 
+ * + * required string start_date = 1; */ public Builder setStartDate( java.lang.String value) { @@ -2310,12 +2364,12 @@ public final class ZooKeeperProtos { return this; } /** - * required string start_date = 1; - * *
        * If this znode is present, cluster is up.  Currently
        * the data is cluster start_date.
        * 
+ * + * required string start_date = 1; */ public Builder clearStartDate() { bitField0_ = (bitField0_ & ~0x00000001); @@ -2324,12 +2378,12 @@ public final class ZooKeeperProtos { return this; } /** - * required string start_date = 1; - * *
        * If this znode is present, cluster is up.  Currently
        * the data is cluster start_date.
        * 
+ * + * required string start_date = 1; */ public Builder setStartDateBytes( com.google.protobuf.ByteString value) { @@ -2341,22 +2395,59 @@ public final class ZooKeeperProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ClusterUp) } + // @@protoc_insertion_point(class_scope:hbase.pb.ClusterUp) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp DEFAULT_INSTANCE; static { - defaultInstance = new ClusterUp(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ClusterUp parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ClusterUp(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ClusterUp) } - public interface SplitLogTaskOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface 
SplitLogTaskOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SplitLogTask) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.SplitLogTask.State state = 1; /** * required .hbase.pb.SplitLogTask.State state = 1; */ @@ -2366,7 +2457,6 @@ public final class ZooKeeperProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.State getState(); - // required .hbase.pb.ServerName server_name = 2; /** * required .hbase.pb.ServerName server_name = 2; */ @@ -2380,7 +2470,6 @@ public final class ZooKeeperProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder(); - // optional .hbase.pb.SplitLogTask.RecoveryMode mode = 3 [default = UNKNOWN]; /** * optional .hbase.pb.SplitLogTask.RecoveryMode mode = 3 [default = UNKNOWN]; */ @@ -2391,44 +2480,37 @@ public final class ZooKeeperProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode getMode(); } /** - * Protobuf type {@code hbase.pb.SplitLogTask} - * *
    **
    * WAL SplitLog directory znodes have this for content.  Used doing distributed
    * WAL splitting.  Holds current state and name of server that originated split.
    * 
+ * + * Protobuf type {@code hbase.pb.SplitLogTask} */ - public static final class SplitLogTask extends - com.google.protobuf.GeneratedMessage - implements SplitLogTaskOrBuilder { + public static final class SplitLogTask extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.SplitLogTask) + SplitLogTaskOrBuilder { // Use SplitLogTask.newBuilder() to construct. - private SplitLogTask(com.google.protobuf.GeneratedMessage.Builder builder) { + private SplitLogTask(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private SplitLogTask(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SplitLogTask defaultInstance; - public static SplitLogTask getDefaultInstance() { - return defaultInstance; - } - - public SplitLogTask getDefaultInstanceForType() { - return defaultInstance; + private SplitLogTask() { + state_ = 0; + mode_ = 0; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private SplitLogTask( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -2454,7 +2536,7 @@ public final class ZooKeeperProtos { unknownFields.mergeVarintField(1, rawValue); } else { bitField0_ |= 0x00000001; - state_ = value; + state_ = rawValue; } break; } @@ -2478,7 +2560,7 @@ public final class ZooKeeperProtos { unknownFields.mergeVarintField(3, rawValue); } else { bitField0_ |= 0x00000004; - mode_ = value; + mode_ = rawValue; } break; } @@ -2488,7 
+2570,7 @@ public final class ZooKeeperProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -2499,28 +2581,13 @@ public final class ZooKeeperProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SplitLogTask_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SplitLogTask_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SplitLogTask parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SplitLogTask(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - /** * Protobuf enum {@code hbase.pb.SplitLogTask.State} */ @@ -2529,23 +2596,23 @@ public final class ZooKeeperProtos { /** * UNASSIGNED = 0; */ - UNASSIGNED(0, 0), + UNASSIGNED(0), /** * OWNED = 1; */ - OWNED(1, 1), + OWNED(1), /** * RESIGNED = 2; */ - RESIGNED(2, 2), + RESIGNED(2), /** * DONE = 3; */ - DONE(3, 3), + DONE(3), /** * ERR = 4; */ - ERR(4, 4), + ERR(4), ; /** @@ -2570,9 +2637,19 @@ public final class ZooKeeperProtos { public static final int ERR_VALUE = 4; - 
public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static State valueOf(int value) { + return forNumber(value); + } + + public static State forNumber(int value) { switch (value) { case 0: return UNASSIGNED; case 1: return OWNED; @@ -2587,17 +2664,17 @@ public final class ZooKeeperProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + State> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public State findValueByNumber(int number) { - return State.valueOf(number); + return State.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -2619,11 +2696,9 @@ public final class ZooKeeperProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private State(int index, int value) { - this.index = index; + private State(int value) { this.value = value; } @@ -2638,15 +2713,15 @@ public final class ZooKeeperProtos { /** * UNKNOWN = 0; */ - UNKNOWN(0, 0), + UNKNOWN(0), /** * LOG_SPLITTING = 1; */ - LOG_SPLITTING(1, 1), + LOG_SPLITTING(1), /** * LOG_REPLAY = 2; */ - LOG_REPLAY(2, 2), + LOG_REPLAY(2), ; /** @@ -2663,9 +2738,19 @@ public final class ZooKeeperProtos { public static final int LOG_REPLAY_VALUE = 2; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. 
+ */ + @java.lang.Deprecated public static RecoveryMode valueOf(int value) { + return forNumber(value); + } + + public static RecoveryMode forNumber(int value) { switch (value) { case 0: return UNKNOWN; case 1: return LOG_SPLITTING; @@ -2678,17 +2763,17 @@ public final class ZooKeeperProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + RecoveryMode> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public RecoveryMode findValueByNumber(int number) { - return RecoveryMode.valueOf(number); + return RecoveryMode.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -2710,11 +2795,9 @@ public final class ZooKeeperProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private RecoveryMode(int index, int value) { - this.index = index; + private RecoveryMode(int value) { this.value = value; } @@ -2722,9 +2805,8 @@ public final class ZooKeeperProtos { } private int bitField0_; - // required .hbase.pb.SplitLogTask.State state = 1; public static final int STATE_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.State state_; + private int state_; /** * required .hbase.pb.SplitLogTask.State state = 1; */ @@ -2735,10 +2817,10 @@ public final class ZooKeeperProtos { * required .hbase.pb.SplitLogTask.State state = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.State getState() { - return state_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.State result = 
org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.valueOf(state_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.UNASSIGNED : result; } - // required .hbase.pb.ServerName server_name = 2; public static final int SERVER_NAME_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName serverName_; /** @@ -2751,18 +2833,17 @@ public final class ZooKeeperProtos { * required .hbase.pb.ServerName server_name = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServerName() { - return serverName_; + return serverName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : serverName_; } /** * required .hbase.pb.ServerName server_name = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() { - return serverName_; + return serverName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : serverName_; } - // optional .hbase.pb.SplitLogTask.RecoveryMode mode = 3 [default = UNKNOWN]; public static final int MODE_FIELD_NUMBER = 3; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode mode_; + private int mode_; /** * optional .hbase.pb.SplitLogTask.RecoveryMode mode = 3 [default = UNKNOWN]; */ @@ -2773,18 +2854,15 @@ public final class ZooKeeperProtos { * optional .hbase.pb.SplitLogTask.RecoveryMode mode = 3 [default = UNKNOWN]; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode getMode() { - return mode_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode result = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode.valueOf(mode_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode.UNKNOWN : result; } - private void initFields() { - state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.UNASSIGNED; - serverName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - mode_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode.UNKNOWN; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasState()) { memoizedIsInitialized = 0; @@ -2804,50 +2882,42 @@ public final class ZooKeeperProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeEnum(1, state_.getNumber()); + output.writeEnum(1, state_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, serverName_); + output.writeMessage(2, getServerName()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeEnum(3, mode_.getNumber()); + output.writeEnum(3, mode_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(1, state_.getNumber()); + .computeEnumSize(1, state_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, serverName_); + .computeMessageSize(2, getServerName()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += 
com.google.protobuf.CodedOutputStream - .computeEnumSize(3, mode_.getNumber()); + .computeEnumSize(3, mode_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -2860,8 +2930,7 @@ public final class ZooKeeperProtos { boolean result = true; result = result && (hasState() == other.hasState()); if (hasState()) { - result = result && - (getState() == other.getState()); + result = result && state_ == other.state_; } result = result && (hasServerName() == other.hasServerName()); if (hasServerName()) { @@ -2870,15 +2939,12 @@ public final class ZooKeeperProtos { } result = result && (hasMode() == other.hasMode()); if (hasMode()) { - result = result && - (getMode() == other.getMode()); + result = result && mode_ == other.mode_; } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -2888,7 +2954,7 @@ public final class ZooKeeperProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasState()) { hash = (37 * hash) + STATE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getState()); + hash = (53 * hash) + state_; } if (hasServerName()) { hash = (37 * hash) + SERVER_NAME_FIELD_NUMBER; @@ -2896,9 +2962,9 @@ public final class ZooKeeperProtos { } if (hasMode()) { hash = (37 * hash) + MODE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getMode()); + hash = (53 * hash) + mode_; } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * 
hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -2926,67 +2992,79 @@ public final class ZooKeeperProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.SplitLogTask} - * *
      **
      * WAL SplitLog directory znodes have this for content.  Used doing distributed
      * WAL splitting.  Holds current state and name of server that originated split.
      * 
+ * + * Protobuf type {@code hbase.pb.SplitLogTask} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTaskOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SplitLogTask) + org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTaskOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SplitLogTask_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SplitLogTask_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -2999,38 +3077,31 @@ public final class ZooKeeperProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getServerNameFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); - state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.UNASSIGNED; + state_ = 0; bitField0_ = (bitField0_ & ~0x00000001); if (serverNameBuilder_ == null) { - serverName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + serverName_ = null; } else { serverNameBuilder_.clear(); } bitField0_ = (bitField0_ & 
~0x00000002); - mode_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode.UNKNOWN; + mode_ = 0; bitField0_ = (bitField0_ & ~0x00000004); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SplitLogTask_descriptor; @@ -3073,6 +3144,32 @@ public final class ZooKeeperProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask)other); @@ -3093,21 +3190,19 @@ public final class ZooKeeperProtos { if (other.hasMode()) { setMode(other.getMode()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasState()) { - return false; } if 
(!hasServerName()) { - return false; } if (!getServerName().isInitialized()) { - return false; } return true; @@ -3122,7 +3217,7 @@ public final class ZooKeeperProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -3132,8 +3227,7 @@ public final class ZooKeeperProtos { } private int bitField0_; - // required .hbase.pb.SplitLogTask.State state = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.State state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.UNASSIGNED; + private int state_ = 0; /** * required .hbase.pb.SplitLogTask.State state = 1; */ @@ -3144,7 +3238,8 @@ public final class ZooKeeperProtos { * required .hbase.pb.SplitLogTask.State state = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.State getState() { - return state_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.State result = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.valueOf(state_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.UNASSIGNED : result; } /** * required .hbase.pb.SplitLogTask.State state = 1; @@ -3154,7 +3249,7 @@ public final class ZooKeeperProtos { throw new NullPointerException(); } bitField0_ |= 0x00000001; - state_ = value; + state_ = value.getNumber(); onChanged(); return this; } @@ -3163,14 +3258,13 @@ public final class ZooKeeperProtos { */ public Builder clearState() { bitField0_ = (bitField0_ & ~0x00000001); - state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.UNASSIGNED; + state_ = 0; onChanged(); return this; } - // required .hbase.pb.ServerName server_name = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName serverName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName serverName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverNameBuilder_; /** * required .hbase.pb.ServerName server_name = 2; @@ -3183,7 +3277,7 @@ public final class ZooKeeperProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServerName() { if (serverNameBuilder_ == null) { - return serverName_; + return serverName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : serverName_; } else { return serverNameBuilder_.getMessage(); } @@ -3224,6 +3318,7 @@ public final class ZooKeeperProtos { public Builder mergeServerName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (serverNameBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + serverName_ != null && serverName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { serverName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.newBuilder(serverName_).mergeFrom(value).buildPartial(); @@ -3242,7 +3337,7 @@ public final class ZooKeeperProtos { */ public Builder clearServerName() { if (serverNameBuilder_ == null) { - serverName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + serverName_ = null; onChanged(); } else { serverNameBuilder_.clear(); @@ -3265,19 +3360,20 @@ public final class ZooKeeperProtos { if (serverNameBuilder_ != null) { return serverNameBuilder_.getMessageOrBuilder(); } else { - return serverName_; + return serverName_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : serverName_; } } /** * required .hbase.pb.ServerName server_name = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getServerNameFieldBuilder() { if (serverNameBuilder_ == null) { - serverNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + serverNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( - serverName_, + getServerName(), getParentForChildren(), isClean()); serverName_ = null; @@ -3285,8 +3381,7 @@ public final class ZooKeeperProtos { return serverNameBuilder_; } - // optional .hbase.pb.SplitLogTask.RecoveryMode mode = 3 [default = UNKNOWN]; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode mode_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode.UNKNOWN; + private int mode_ = 0; /** * optional .hbase.pb.SplitLogTask.RecoveryMode mode = 3 [default = UNKNOWN]; */ @@ -3297,7 +3392,8 @@ public final class ZooKeeperProtos { * optional .hbase.pb.SplitLogTask.RecoveryMode mode = 3 [default = UNKNOWN]; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode getMode() { - return mode_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode result = 
org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode.valueOf(mode_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode.UNKNOWN : result; } /** * optional .hbase.pb.SplitLogTask.RecoveryMode mode = 3 [default = UNKNOWN]; @@ -3307,7 +3403,7 @@ public final class ZooKeeperProtos { throw new NullPointerException(); } bitField0_ |= 0x00000004; - mode_ = value; + mode_ = value.getNumber(); onChanged(); return this; } @@ -3316,86 +3412,115 @@ public final class ZooKeeperProtos { */ public Builder clearMode() { bitField0_ = (bitField0_ & ~0x00000004); - mode_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode.UNKNOWN; + mode_ = 0; onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.SplitLogTask) } + // @@protoc_insertion_point(class_scope:hbase.pb.SplitLogTask) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask DEFAULT_INSTANCE; static { - defaultInstance = new SplitLogTask(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public SplitLogTask parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
+ throws com.google.protobuf.InvalidProtocolBufferException { + return new SplitLogTask(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.SplitLogTask) } - public interface DeprecatedTableStateOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface DeprecatedTableStateOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.DeprecatedTableState) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.DeprecatedTableState.State state = 1 [default = ENABLED]; /** - * required .hbase.pb.DeprecatedTableState.State state = 1 [default = ENABLED]; - * *
      * This is the table's state.  If no znode for a table,
      * its state is presumed enabled.  See o.a.h.h.zookeeper.ZKTable class
      * for more.
      * 
+ * + * required .hbase.pb.DeprecatedTableState.State state = 1 [default = ENABLED]; */ boolean hasState(); /** - * required .hbase.pb.DeprecatedTableState.State state = 1 [default = ENABLED]; - * *
      * This is the table's state.  If no znode for a table,
      * its state is presumed enabled.  See o.a.h.h.zookeeper.ZKTable class
      * for more.
      * 
+ * + * required .hbase.pb.DeprecatedTableState.State state = 1 [default = ENABLED]; */ org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.State getState(); } /** - * Protobuf type {@code hbase.pb.DeprecatedTableState} - * *
    **
    * The znode that holds state of table.
    * Deprected, table state is stored in table descriptor on HDFS.
    * 
+ * + * Protobuf type {@code hbase.pb.DeprecatedTableState} */ - public static final class DeprecatedTableState extends - com.google.protobuf.GeneratedMessage - implements DeprecatedTableStateOrBuilder { + public static final class DeprecatedTableState extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.DeprecatedTableState) + DeprecatedTableStateOrBuilder { // Use DeprecatedTableState.newBuilder() to construct. - private DeprecatedTableState(com.google.protobuf.GeneratedMessage.Builder builder) { + private DeprecatedTableState(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private DeprecatedTableState(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final DeprecatedTableState defaultInstance; - public static DeprecatedTableState getDefaultInstance() { - return defaultInstance; } - - public DeprecatedTableState getDefaultInstanceForType() { - return defaultInstance; + private DeprecatedTableState() { + state_ = 0; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private DeprecatedTableState( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -3421,7 +3546,7 @@ public final class ZooKeeperProtos { unknownFields.mergeVarintField(1, rawValue); } else { bitField0_ |= 0x00000001; - state_ = value; + state_ = rawValue; } break; } @@ -3431,7 +3556,7 @@ public final class ZooKeeperProtos { throw 
e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -3442,53 +3567,38 @@ public final class ZooKeeperProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_DeprecatedTableState_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_DeprecatedTableState_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public DeprecatedTableState parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new DeprecatedTableState(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - /** - * Protobuf enum {@code hbase.pb.DeprecatedTableState.State} - * *
      * Table's current state
      * 
+ * + * Protobuf enum {@code hbase.pb.DeprecatedTableState.State} */ public enum State implements com.google.protobuf.ProtocolMessageEnum { /** * ENABLED = 0; */ - ENABLED(0, 0), + ENABLED(0), /** * DISABLED = 1; */ - DISABLED(1, 1), + DISABLED(1), /** * DISABLING = 2; */ - DISABLING(2, 2), + DISABLING(2), /** * ENABLING = 3; */ - ENABLING(3, 3), + ENABLING(3), ; /** @@ -3509,9 +3619,19 @@ public final class ZooKeeperProtos { public static final int ENABLING_VALUE = 3; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static State valueOf(int value) { + return forNumber(value); + } + + public static State forNumber(int value) { switch (value) { case 0: return ENABLED; case 1: return DISABLED; @@ -3525,17 +3645,17 @@ public final class ZooKeeperProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + State> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public State findValueByNumber(int number) { - return State.valueOf(number); + return State.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -3557,11 +3677,9 @@ public final class ZooKeeperProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private State(int index, int value) { - this.index = index; + private State(int value) { this.value = value; } @@ -3569,41 +3687,39 @@ public final class ZooKeeperProtos { } private int bitField0_; - // required .hbase.pb.DeprecatedTableState.State state = 1 [default = 
ENABLED]; public static final int STATE_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.State state_; + private int state_; /** - * required .hbase.pb.DeprecatedTableState.State state = 1 [default = ENABLED]; - * *
      * This is the table's state.  If no znode for a table,
      * its state is presumed enabled.  See o.a.h.h.zookeeper.ZKTable class
      * for more.
      * 
+ * + * required .hbase.pb.DeprecatedTableState.State state = 1 [default = ENABLED]; */ public boolean hasState() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required .hbase.pb.DeprecatedTableState.State state = 1 [default = ENABLED]; - * *
      * This is the table's state.  If no znode for a table,
      * its state is presumed enabled.  See o.a.h.h.zookeeper.ZKTable class
      * for more.
      * 
+ * + * required .hbase.pb.DeprecatedTableState.State state = 1 [default = ENABLED]; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.State getState() { - return state_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.State result = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.State.valueOf(state_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.State.ENABLED : result; } - private void initFields() { - state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.State.ENABLED; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasState()) { memoizedIsInitialized = 0; @@ -3615,36 +3731,28 @@ public final class ZooKeeperProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeEnum(1, state_.getNumber()); + output.writeEnum(1, state_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(1, state_.getNumber()); + .computeEnumSize(1, state_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected 
java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -3657,15 +3765,12 @@ public final class ZooKeeperProtos { boolean result = true; result = result && (hasState() == other.hasState()); if (hasState()) { - result = result && - (getState() == other.getState()); + result = result && state_ == other.state_; } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -3675,9 +3780,9 @@ public final class ZooKeeperProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasState()) { hash = (37 * hash) + STATE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getState()); + hash = (53 * hash) + state_; } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -3705,67 +3810,79 @@ public final class ZooKeeperProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState 
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.DeprecatedTableState} - * *
      **
      * The znode that holds state of table.
      * Deprected, table state is stored in table descriptor on HDFS.
      * 
+ * + * Protobuf type {@code hbase.pb.DeprecatedTableState} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableStateOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.DeprecatedTableState) + org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableStateOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_DeprecatedTableState_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_DeprecatedTableState_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -3778,29 +3895,22 @@ public final class ZooKeeperProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); - state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.State.ENABLED; + state_ = 0; bitField0_ = (bitField0_ & ~0x00000001); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_DeprecatedTableState_descriptor; @@ -3831,6 +3941,32 @@ public final class ZooKeeperProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState)other); @@ -3845,13 +3981,13 @@ public final class ZooKeeperProtos { if (other.hasState()) { setState(other.getState()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasState()) { - return false; } return true; @@ -3866,7 +4002,7 @@ public final class ZooKeeperProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState) e.getUnfinishedMessage(); - 
throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -3876,81 +4012,118 @@ public final class ZooKeeperProtos { } private int bitField0_; - // required .hbase.pb.DeprecatedTableState.State state = 1 [default = ENABLED]; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.State state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.State.ENABLED; + private int state_ = 0; /** - * required .hbase.pb.DeprecatedTableState.State state = 1 [default = ENABLED]; - * *
        * This is the table's state.  If no znode for a table,
        * its state is presumed enabled.  See o.a.h.h.zookeeper.ZKTable class
        * for more.
        * 
+ * + * required .hbase.pb.DeprecatedTableState.State state = 1 [default = ENABLED]; */ public boolean hasState() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required .hbase.pb.DeprecatedTableState.State state = 1 [default = ENABLED]; - * *
        * This is the table's state.  If no znode for a table,
        * its state is presumed enabled.  See o.a.h.h.zookeeper.ZKTable class
        * for more.
        * 
+ * + * required .hbase.pb.DeprecatedTableState.State state = 1 [default = ENABLED]; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.State getState() { - return state_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.State result = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.State.valueOf(state_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.State.ENABLED : result; } /** - * required .hbase.pb.DeprecatedTableState.State state = 1 [default = ENABLED]; - * *
        * This is the table's state.  If no znode for a table,
        * its state is presumed enabled.  See o.a.h.h.zookeeper.ZKTable class
        * for more.
        * 
+ * + * required .hbase.pb.DeprecatedTableState.State state = 1 [default = ENABLED]; */ public Builder setState(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.State value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; - state_ = value; + state_ = value.getNumber(); onChanged(); return this; } /** - * required .hbase.pb.DeprecatedTableState.State state = 1 [default = ENABLED]; - * *
        * This is the table's state.  If no znode for a table,
        * its state is presumed enabled.  See o.a.h.h.zookeeper.ZKTable class
        * for more.
        * 
+ * + * required .hbase.pb.DeprecatedTableState.State state = 1 [default = ENABLED]; */ public Builder clearState() { bitField0_ = (bitField0_ & ~0x00000001); - state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.State.ENABLED; + state_ = 0; onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:hbase.pb.DeprecatedTableState) + } + + // @@protoc_insertion_point(class_scope:hbase.pb.DeprecatedTableState) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public DeprecatedTableState parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DeprecatedTableState(input, extensionRegistry); + } + }; - // @@protoc_insertion_point(builder_scope:hbase.pb.DeprecatedTableState) + public static com.google.protobuf.Parser parser() { + return PARSER; } - static { - defaultInstance = new DeprecatedTableState(true); - defaultInstance.initFields(); + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public 
org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.DeprecatedTableState) } - public interface TableCFOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface TableCFOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.TableCF) + com.google.protobuf.MessageOrBuilder { - // optional .hbase.pb.TableName table_name = 1; /** * optional .hbase.pb.TableName table_name = 1; */ @@ -3964,7 +4137,6 @@ public final class ZooKeeperProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); - // repeated bytes families = 2; /** * repeated bytes families = 2; */ @@ -3981,36 +4153,28 @@ public final class ZooKeeperProtos { /** * Protobuf type {@code hbase.pb.TableCF} */ - public static final class TableCF extends - com.google.protobuf.GeneratedMessage - implements TableCFOrBuilder { + public static final class TableCF extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.TableCF) + TableCFOrBuilder { // Use TableCF.newBuilder() to construct. 
- private TableCF(com.google.protobuf.GeneratedMessage.Builder builder) { + private TableCF(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private TableCF(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final TableCF defaultInstance; - public static TableCF getDefaultInstance() { - return defaultInstance; } - - public TableCF getDefaultInstanceForType() { - return defaultInstance; + private TableCF() { + families_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private TableCF( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -4056,7 +4220,7 @@ public final class ZooKeeperProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { families_ = java.util.Collections.unmodifiableList(families_); @@ -4070,30 +4234,14 @@ public final class ZooKeeperProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_TableCF_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_TableCF_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCF.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCF.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public TableCF parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new TableCF(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional .hbase.pb.TableName table_name = 1; public static final int TABLE_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** @@ -4106,16 +4254,15 @@ public final class ZooKeeperProtos { * optional .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * optional .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { - return tableName_; + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } - // repeated bytes families = 2; public static final int FAMILIES_FIELD_NUMBER = 2; private java.util.List families_; /** @@ -4138,14 +4285,11 @@ public final class ZooKeeperProtos { return families_.get(index); } - private void initFields() { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - families_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (hasTableName()) { if (!getTableName().isInitialized()) { @@ -4159,25 +4303,23 @@ public final class ZooKeeperProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, tableName_); + output.writeMessage(1, getTableName()); } for (int i = 0; i < families_.size(); i++) { output.writeBytes(2, families_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, tableName_); + .computeMessageSize(1, getTableName()); } { int dataSize = 0; @@ -4188,19 +4330,13 @@ public final class ZooKeeperProtos { size += dataSize; size += 1 * getFamiliesList().size(); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long 
serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -4218,12 +4354,10 @@ public final class ZooKeeperProtos { } result = result && getFamiliesList() .equals(other.getFamiliesList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -4239,7 +4373,7 @@ public final class ZooKeeperProtos { hash = (37 * hash) + FAMILIES_FIELD_NUMBER; hash = (53 * hash) + getFamiliesList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -4267,46 +4401,57 @@ public final class ZooKeeperProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCF parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCF parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCF parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } 
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCF parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCF parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCF parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCF prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -4314,14 +4459,15 @@ public final class ZooKeeperProtos { * Protobuf type {@code hbase.pb.TableCF} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCFOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.TableCF) + org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCFOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_TableCF_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_TableCF_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -4334,23 +4480,20 @@ public final class ZooKeeperProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTableNameFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (tableNameBuilder_ 
== null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; } else { tableNameBuilder_.clear(); } @@ -4360,10 +4503,6 @@ public final class ZooKeeperProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_TableCF_descriptor; @@ -4403,6 +4542,32 @@ public final class ZooKeeperProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCF) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCF)other); @@ -4427,14 +4592,14 @@ public final class ZooKeeperProtos { } onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (hasTableName()) { if 
(!getTableName().isInitialized()) { - return false; } } @@ -4450,7 +4615,7 @@ public final class ZooKeeperProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCF) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -4460,9 +4625,8 @@ public final class ZooKeeperProtos { } private int bitField0_; - // optional .hbase.pb.TableName table_name = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * optional .hbase.pb.TableName table_name = 1; @@ -4475,7 +4639,7 @@ public final class ZooKeeperProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { - return tableName_; + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } @@ -4516,6 +4680,7 @@ public final class ZooKeeperProtos { public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); @@ -4534,7 +4699,7 @@ public final class ZooKeeperProtos { */ public Builder clearTableName() { if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); @@ -4557,19 +4722,20 @@ public final class ZooKeeperProtos { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { - return tableName_; + return tableName_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * optional .hbase.pb.TableName table_name = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { - tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( - tableName_, + getTableName(), getParentForChildren(), isClean()); tableName_ = null; @@ -4577,7 +4743,6 @@ public final class ZooKeeperProtos { return tableNameBuilder_; } - // repeated bytes families = 2; private java.util.List families_ = java.util.Collections.emptyList(); private void ensureFamiliesIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { @@ -4635,7 +4800,8 @@ public final class ZooKeeperProtos { public Builder addAllFamilies( java.lang.Iterable values) { ensureFamiliesIsMutable(); - super.addAll(values, families_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, families_); onChanged(); return this; } @@ -4648,52 +4814,88 @@ public final class ZooKeeperProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } 
+ // @@protoc_insertion_point(builder_scope:hbase.pb.TableCF) } + // @@protoc_insertion_point(class_scope:hbase.pb.TableCF) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCF DEFAULT_INSTANCE; static { - defaultInstance = new TableCF(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCF(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCF getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public TableCF parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TableCF(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCF getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.TableCF) } - public interface ReplicationPeerOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ReplicationPeerOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ReplicationPeer) + com.google.protobuf.MessageOrBuilder { - // required string clusterkey = 1; /** - * required string clusterkey = 1; - * *
      * clusterkey is the concatenation of the slave cluster's
      * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
      * 
+ * + * required string clusterkey = 1; */ boolean hasClusterkey(); /** - * required string clusterkey = 1; - * *
      * clusterkey is the concatenation of the slave cluster's
      * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
      * 
+ * + * required string clusterkey = 1; */ java.lang.String getClusterkey(); /** - * required string clusterkey = 1; - * *
      * clusterkey is the concatenation of the slave cluster's
      * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
      * 
+ * + * required string clusterkey = 1; */ com.google.protobuf.ByteString getClusterkeyBytes(); - // optional string replicationEndpointImpl = 2; /** * optional string replicationEndpointImpl = 2; */ @@ -4708,7 +4910,6 @@ public final class ZooKeeperProtos { com.google.protobuf.ByteString getReplicationEndpointImplBytes(); - // repeated .hbase.pb.BytesBytesPair data = 3; /** * repeated .hbase.pb.BytesBytesPair data = 3; */ @@ -4733,7 +4934,6 @@ public final class ZooKeeperProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getDataOrBuilder( int index); - // repeated .hbase.pb.NameStringPair configuration = 4; /** * repeated .hbase.pb.NameStringPair configuration = 4; */ @@ -4758,7 +4958,6 @@ public final class ZooKeeperProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index); - // repeated .hbase.pb.TableCF table_cfs = 5; /** * repeated .hbase.pb.TableCF table_cfs = 5; */ @@ -4783,7 +4982,6 @@ public final class ZooKeeperProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCFOrBuilder getTableCfsOrBuilder( int index); - // repeated bytes namespaces = 6; /** * repeated bytes namespaces = 6; */ @@ -4798,43 +4996,40 @@ public final class ZooKeeperProtos { com.google.protobuf.ByteString getNamespaces(int index); } /** - * Protobuf type {@code hbase.pb.ReplicationPeer} - * *
    **
    * Used by replication. Holds a replication peer key.
    * 
+ * + * Protobuf type {@code hbase.pb.ReplicationPeer} */ - public static final class ReplicationPeer extends - com.google.protobuf.GeneratedMessage - implements ReplicationPeerOrBuilder { + public static final class ReplicationPeer extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ReplicationPeer) + ReplicationPeerOrBuilder { // Use ReplicationPeer.newBuilder() to construct. - private ReplicationPeer(com.google.protobuf.GeneratedMessage.Builder builder) { + private ReplicationPeer(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ReplicationPeer(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ReplicationPeer defaultInstance; - public static ReplicationPeer getDefaultInstance() { - return defaultInstance; } - - public ReplicationPeer getDefaultInstanceForType() { - return defaultInstance; + private ReplicationPeer() { + clusterkey_ = ""; + replicationEndpointImpl_ = ""; + data_ = java.util.Collections.emptyList(); + configuration_ = java.util.Collections.emptyList(); + tableCfs_ = java.util.Collections.emptyList(); + namespaces_ = java.util.Collections.emptyList(); } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ReplicationPeer( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -4854,13 +5049,15 @@ public final class ZooKeeperProtos { break; } case 10: { + com.google.protobuf.ByteString bs 
= input.readBytes(); bitField0_ |= 0x00000001; - clusterkey_ = input.readBytes(); + clusterkey_ = bs; break; } case 18: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; - replicationEndpointImpl_ = input.readBytes(); + replicationEndpointImpl_ = bs; break; } case 26: { @@ -4868,7 +5065,8 @@ public final class ZooKeeperProtos { data_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000004; } - data_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry)); + data_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry)); break; } case 34: { @@ -4876,7 +5074,8 @@ public final class ZooKeeperProtos { configuration_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000008; } - configuration_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry)); + configuration_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry)); break; } case 42: { @@ -4884,7 +5083,8 @@ public final class ZooKeeperProtos { tableCfs_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000010; } - tableCfs_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCF.PARSER, extensionRegistry)); + tableCfs_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCF.PARSER, extensionRegistry)); break; } case 50: { @@ -4901,7 +5101,7 @@ public final class ZooKeeperProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { data_ = 
java.util.Collections.unmodifiableList(data_); @@ -4924,50 +5124,34 @@ public final class ZooKeeperProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_ReplicationPeer_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_ReplicationPeer_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationPeer.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationPeer.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ReplicationPeer parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ReplicationPeer(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required string clusterkey = 1; public static final int CLUSTERKEY_FIELD_NUMBER = 1; - private java.lang.Object clusterkey_; + private volatile java.lang.Object clusterkey_; /** - * required string clusterkey = 1; - * *
      * clusterkey is the concatenation of the slave cluster's
      * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
      * 
+ * + * required string clusterkey = 1; */ public boolean hasClusterkey() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required string clusterkey = 1; - * *
      * clusterkey is the concatenation of the slave cluster's
      * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
      * 
+ * + * required string clusterkey = 1; */ public java.lang.String getClusterkey() { java.lang.Object ref = clusterkey_; @@ -4984,12 +5168,12 @@ public final class ZooKeeperProtos { } } /** - * required string clusterkey = 1; - * *
      * clusterkey is the concatenation of the slave cluster's
      * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
      * 
+ * + * required string clusterkey = 1; */ public com.google.protobuf.ByteString getClusterkeyBytes() { @@ -5005,9 +5189,8 @@ public final class ZooKeeperProtos { } } - // optional string replicationEndpointImpl = 2; public static final int REPLICATIONENDPOINTIMPL_FIELD_NUMBER = 2; - private java.lang.Object replicationEndpointImpl_; + private volatile java.lang.Object replicationEndpointImpl_; /** * optional string replicationEndpointImpl = 2; */ @@ -5048,7 +5231,6 @@ public final class ZooKeeperProtos { } } - // repeated .hbase.pb.BytesBytesPair data = 3; public static final int DATA_FIELD_NUMBER = 3; private java.util.List data_; /** @@ -5084,7 +5266,6 @@ public final class ZooKeeperProtos { return data_.get(index); } - // repeated .hbase.pb.NameStringPair configuration = 4; public static final int CONFIGURATION_FIELD_NUMBER = 4; private java.util.List configuration_; /** @@ -5120,7 +5301,6 @@ public final class ZooKeeperProtos { return configuration_.get(index); } - // repeated .hbase.pb.TableCF table_cfs = 5; public static final int TABLE_CFS_FIELD_NUMBER = 5; private java.util.List tableCfs_; /** @@ -5156,7 +5336,6 @@ public final class ZooKeeperProtos { return tableCfs_.get(index); } - // repeated bytes namespaces = 6; public static final int NAMESPACES_FIELD_NUMBER = 6; private java.util.List namespaces_; /** @@ -5179,18 +5358,11 @@ public final class ZooKeeperProtos { return namespaces_.get(index); } - private void initFields() { - clusterkey_ = ""; - replicationEndpointImpl_ = ""; - data_ = java.util.Collections.emptyList(); - configuration_ = java.util.Collections.emptyList(); - tableCfs_ = java.util.Collections.emptyList(); - namespaces_ = java.util.Collections.emptyList(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasClusterkey()) { 
memoizedIsInitialized = 0; @@ -5220,12 +5392,11 @@ public final class ZooKeeperProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getClusterkeyBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, clusterkey_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getReplicationEndpointImplBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, replicationEndpointImpl_); } for (int i = 0; i < data_.size(); i++) { output.writeMessage(3, data_.get(i)); @@ -5239,22 +5410,19 @@ public final class ZooKeeperProtos { for (int i = 0; i < namespaces_.size(); i++) { output.writeBytes(6, namespaces_.get(i)); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getClusterkeyBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, clusterkey_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getReplicationEndpointImplBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, replicationEndpointImpl_); } for (int i = 0; i < data_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -5277,19 +5445,13 @@ public final class ZooKeeperProtos { size += dataSize; size += 1 * getNamespacesList().size(); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - 
protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -5318,12 +5480,10 @@ public final class ZooKeeperProtos { .equals(other.getTableCfsList()); result = result && getNamespacesList() .equals(other.getNamespacesList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -5355,7 +5515,7 @@ public final class ZooKeeperProtos { hash = (37 * hash) + NAMESPACES_FIELD_NUMBER; hash = (53 * hash) + getNamespacesList().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -5383,66 +5543,78 @@ public final class ZooKeeperProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + 
.parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationPeer prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.ReplicationPeer} - * *
      **
      * Used by replication. Holds a replication peer key.
      * 
+ * + * Protobuf type {@code hbase.pb.ReplicationPeer} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationPeerOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ReplicationPeer) + org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationPeerOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_ReplicationPeer_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_ReplicationPeer_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -5455,21 +5627,18 @@ public final class ZooKeeperProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getDataFieldBuilder(); getConfigurationFieldBuilder(); getTableCfsFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); clusterkey_ = ""; @@ -5499,10 +5668,6 @@ public final class ZooKeeperProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_ReplicationPeer_descriptor; @@ -5569,6 +5734,32 @@ public final class ZooKeeperProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationPeer) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationPeer)other); @@ -5609,7 +5800,7 @@ public final class ZooKeeperProtos { data_ = other.data_; bitField0_ = (bitField0_ & ~0x00000004); dataBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getDataFieldBuilder() : null; } else { dataBuilder_.addAllMessages(other.data_); @@ -5635,7 +5826,7 @@ public final class ZooKeeperProtos { configuration_ = other.configuration_; bitField0_ = (bitField0_ & ~0x00000008); configurationBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getConfigurationFieldBuilder() : null; } else { configurationBuilder_.addAllMessages(other.configuration_); @@ -5661,7 +5852,7 @@ public final class ZooKeeperProtos { tableCfs_ = other.tableCfs_; bitField0_ = (bitField0_ & ~0x00000010); tableCfsBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getTableCfsFieldBuilder() : null; } else { tableCfsBuilder_.addAllMessages(other.tableCfs_); @@ -5678,30 +5869,27 @@ public final class ZooKeeperProtos { } onChanged(); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasClusterkey()) { - return false; } for (int i = 0; i < getDataCount(); i++) { if (!getData(i).isInitialized()) { - return false; } } for (int i = 0; i < getConfigurationCount(); i++) { if (!getConfiguration(i).isInitialized()) { - return false; } } for (int i = 0; i < getTableCfsCount(); i++) { if (!getTableCfs(i).isInitialized()) { - return false; } } @@ -5717,7 +5905,7 @@ public final class ZooKeeperProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationPeer) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -5727,45 +5915,47 @@ public final class ZooKeeperProtos { } private int bitField0_; - // required string clusterkey = 1; private java.lang.Object clusterkey_ = ""; /** - * required string clusterkey = 1; - * *
        * clusterkey is the concatenation of the slave cluster's
        * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
        * 
+ * + * required string clusterkey = 1; */ public boolean hasClusterkey() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required string clusterkey = 1; - * *
        * clusterkey is the concatenation of the slave cluster's
        * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
        * 
+ * + * required string clusterkey = 1; */ public java.lang.String getClusterkey() { java.lang.Object ref = clusterkey_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - clusterkey_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + clusterkey_ = s; + } return s; } else { return (java.lang.String) ref; } } /** - * required string clusterkey = 1; - * *
        * clusterkey is the concatenation of the slave cluster's
        * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
        * 
+ * + * required string clusterkey = 1; */ public com.google.protobuf.ByteString getClusterkeyBytes() { @@ -5781,12 +5971,12 @@ public final class ZooKeeperProtos { } } /** - * required string clusterkey = 1; - * *
        * clusterkey is the concatenation of the slave cluster's
        * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
        * 
+ * + * required string clusterkey = 1; */ public Builder setClusterkey( java.lang.String value) { @@ -5799,12 +5989,12 @@ public final class ZooKeeperProtos { return this; } /** - * required string clusterkey = 1; - * *
        * clusterkey is the concatenation of the slave cluster's
        * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
        * 
+ * + * required string clusterkey = 1; */ public Builder clearClusterkey() { bitField0_ = (bitField0_ & ~0x00000001); @@ -5813,12 +6003,12 @@ public final class ZooKeeperProtos { return this; } /** - * required string clusterkey = 1; - * *
        * clusterkey is the concatenation of the slave cluster's
        * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
        * 
+ * + * required string clusterkey = 1; */ public Builder setClusterkeyBytes( com.google.protobuf.ByteString value) { @@ -5831,7 +6021,6 @@ public final class ZooKeeperProtos { return this; } - // optional string replicationEndpointImpl = 2; private java.lang.Object replicationEndpointImpl_ = ""; /** * optional string replicationEndpointImpl = 2; @@ -5845,9 +6034,12 @@ public final class ZooKeeperProtos { public java.lang.String getReplicationEndpointImpl() { java.lang.Object ref = replicationEndpointImpl_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - replicationEndpointImpl_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + replicationEndpointImpl_ = s; + } return s; } else { return (java.lang.String) ref; @@ -5905,7 +6097,6 @@ public final class ZooKeeperProtos { return this; } - // repeated .hbase.pb.BytesBytesPair data = 3; private java.util.List data_ = java.util.Collections.emptyList(); private void ensureDataIsMutable() { @@ -5915,7 +6106,7 @@ public final class ZooKeeperProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> dataBuilder_; /** @@ -6047,7 +6238,8 @@ public final class ZooKeeperProtos { java.lang.Iterable values) { if (dataBuilder_ == null) { ensureDataIsMutable(); - super.addAll(values, data_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, data_); onChanged(); } else { dataBuilder_.addAllMessages(values); @@ -6130,11 +6322,11 @@ public final class ZooKeeperProtos { getDataBuilderList() { return getDataFieldBuilder().getBuilderList(); 
} - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getDataFieldBuilder() { if (dataBuilder_ == null) { - dataBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + dataBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>( data_, ((bitField0_ & 0x00000004) == 0x00000004), @@ -6145,7 +6337,6 @@ public final class ZooKeeperProtos { return dataBuilder_; } - // repeated .hbase.pb.NameStringPair configuration = 4; private java.util.List configuration_ = java.util.Collections.emptyList(); private void ensureConfigurationIsMutable() { @@ -6155,7 +6346,7 @@ public final class ZooKeeperProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> configurationBuilder_; /** @@ -6287,7 +6478,8 @@ public final class ZooKeeperProtos { java.lang.Iterable values) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); - super.addAll(values, configuration_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, configuration_); onChanged(); } else { configurationBuilder_.addAllMessages(values); @@ -6370,11 +6562,11 @@ public final class ZooKeeperProtos { getConfigurationBuilderList() { return 
getConfigurationFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationFieldBuilder() { if (configurationBuilder_ == null) { - configurationBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + configurationBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>( configuration_, ((bitField0_ & 0x00000008) == 0x00000008), @@ -6385,7 +6577,6 @@ public final class ZooKeeperProtos { return configurationBuilder_; } - // repeated .hbase.pb.TableCF table_cfs = 5; private java.util.List tableCfs_ = java.util.Collections.emptyList(); private void ensureTableCfsIsMutable() { @@ -6395,7 +6586,7 @@ public final class ZooKeeperProtos { } } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCF, org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCF.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCFOrBuilder> tableCfsBuilder_; /** @@ -6527,7 +6718,8 @@ public final class ZooKeeperProtos { java.lang.Iterable values) { if (tableCfsBuilder_ == null) { ensureTableCfsIsMutable(); - super.addAll(values, tableCfs_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, tableCfs_); onChanged(); } else { tableCfsBuilder_.addAllMessages(values); @@ -6610,11 +6802,11 @@ public final class 
ZooKeeperProtos { getTableCfsBuilderList() { return getTableCfsFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilder< + private com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCF, org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCF.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCFOrBuilder> getTableCfsFieldBuilder() { if (tableCfsBuilder_ == null) { - tableCfsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + tableCfsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCF, org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCF.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableCFOrBuilder>( tableCfs_, ((bitField0_ & 0x00000010) == 0x00000010), @@ -6625,7 +6817,6 @@ public final class ZooKeeperProtos { return tableCfsBuilder_; } - // repeated bytes namespaces = 6; private java.util.List namespaces_ = java.util.Collections.emptyList(); private void ensureNamespacesIsMutable() { if (!((bitField0_ & 0x00000020) == 0x00000020)) { @@ -6683,7 +6874,8 @@ public final class ZooKeeperProtos { public Builder addAllNamespaces( java.lang.Iterable values) { ensureNamespacesIsMutable(); - super.addAll(values, namespaces_); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, namespaces_); onChanged(); return this; } @@ -6696,22 +6888,59 @@ public final class ZooKeeperProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ReplicationPeer) } + 
// @@protoc_insertion_point(class_scope:hbase.pb.ReplicationPeer) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationPeer DEFAULT_INSTANCE; static { - defaultInstance = new ReplicationPeer(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationPeer(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationPeer getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ReplicationPeer parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ReplicationPeer(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationPeer getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ReplicationPeer) } - public interface ReplicationStateOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ReplicationStateOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ReplicationState) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.ReplicationState.State state = 1; /** * required .hbase.pb.ReplicationState.State state = 1; */ @@ -6722,43 +6951,35 @@ public final class ZooKeeperProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState.State getState(); } /** - * Protobuf type {@code hbase.pb.ReplicationState} - * *
    **
    * Used by replication. Holds whether enabled or disabled
    * 
+ * + * Protobuf type {@code hbase.pb.ReplicationState} */ - public static final class ReplicationState extends - com.google.protobuf.GeneratedMessage - implements ReplicationStateOrBuilder { + public static final class ReplicationState extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ReplicationState) + ReplicationStateOrBuilder { // Use ReplicationState.newBuilder() to construct. - private ReplicationState(com.google.protobuf.GeneratedMessage.Builder builder) { + private ReplicationState(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ReplicationState(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ReplicationState defaultInstance; - public static ReplicationState getDefaultInstance() { - return defaultInstance; } - - public ReplicationState getDefaultInstanceForType() { - return defaultInstance; + private ReplicationState() { + state_ = 0; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ReplicationState( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -6784,7 +7005,7 @@ public final class ZooKeeperProtos { unknownFields.mergeVarintField(1, rawValue); } else { bitField0_ |= 0x00000001; - state_ = value; + state_ = rawValue; } break; } @@ -6794,7 +7015,7 @@ public final class ZooKeeperProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -6805,28 +7026,13 @@ public final class ZooKeeperProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_ReplicationState_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_ReplicationState_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ReplicationState parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ReplicationState(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - /** * Protobuf enum {@code hbase.pb.ReplicationState.State} */ @@ -6835,11 +7041,11 @@ public final class ZooKeeperProtos { /** * ENABLED = 0; */ - ENABLED(0, 0), + ENABLED(0), /** * DISABLED = 1; */ - DISABLED(1, 1), + DISABLED(1), ; /** @@ -6852,9 +7058,19 @@ public final class ZooKeeperProtos { public static final int DISABLED_VALUE = 1; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. 
+ */ + @java.lang.Deprecated public static State valueOf(int value) { + return forNumber(value); + } + + public static State forNumber(int value) { switch (value) { case 0: return ENABLED; case 1: return DISABLED; @@ -6866,17 +7082,17 @@ public final class ZooKeeperProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + State> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap() { public State findValueByNumber(int number) { - return State.valueOf(number); + return State.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -6898,11 +7114,9 @@ public final class ZooKeeperProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private State(int index, int value) { - this.index = index; + private State(int value) { this.value = value; } @@ -6910,9 +7124,8 @@ public final class ZooKeeperProtos { } private int bitField0_; - // required .hbase.pb.ReplicationState.State state = 1; public static final int STATE_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState.State state_; + private int state_; /** * required .hbase.pb.ReplicationState.State state = 1; */ @@ -6923,16 +7136,15 @@ public final class ZooKeeperProtos { * required .hbase.pb.ReplicationState.State state = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState.State getState() { - return state_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState.State result = 
org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState.State.valueOf(state_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState.State.ENABLED : result; } - private void initFields() { - state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState.State.ENABLED; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasState()) { memoizedIsInitialized = 0; @@ -6944,36 +7156,28 @@ public final class ZooKeeperProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeEnum(1, state_.getNumber()); + output.writeEnum(1, state_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(1, state_.getNumber()); + .computeEnumSize(1, state_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -6986,15 +7190,12 @@ public final class ZooKeeperProtos { boolean result = true; result = result && (hasState() == 
other.hasState()); if (hasState()) { - result = result && - (getState() == other.getState()); + result = result && state_ == other.state_; } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -7004,9 +7205,9 @@ public final class ZooKeeperProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasState()) { hash = (37 * hash) + STATE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getState()); + hash = (53 * hash) + state_; } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -7034,66 +7235,78 @@ public final class ZooKeeperProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState parseDelimitedFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.ReplicationState} - * *
      **
      * Used by replication. Holds whether enabled or disabled
      * 
+ * + * Protobuf type {@code hbase.pb.ReplicationState} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationStateOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ReplicationState) + org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationStateOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_ReplicationState_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_ReplicationState_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -7106,27 +7319,20 @@ public final class ZooKeeperProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); - state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState.State.ENABLED; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); + state_ = 0; + bitField0_ = (bitField0_ & ~0x00000001); + return this; } public com.google.protobuf.Descriptors.Descriptor @@ -7159,6 +7365,32 @@ public 
final class ZooKeeperProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState)other); @@ -7173,13 +7405,13 @@ public final class ZooKeeperProtos { if (other.hasState()) { setState(other.getState()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasState()) { - return false; } return true; @@ -7194,7 +7426,7 @@ public final class ZooKeeperProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -7204,8 +7436,7 @@ public final class ZooKeeperProtos { } 
private int bitField0_; - // required .hbase.pb.ReplicationState.State state = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState.State state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState.State.ENABLED; + private int state_ = 0; /** * required .hbase.pb.ReplicationState.State state = 1; */ @@ -7216,7 +7447,8 @@ public final class ZooKeeperProtos { * required .hbase.pb.ReplicationState.State state = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState.State getState() { - return state_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState.State result = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState.State.valueOf(state_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState.State.ENABLED : result; } /** * required .hbase.pb.ReplicationState.State state = 1; @@ -7226,7 +7458,7 @@ public final class ZooKeeperProtos { throw new NullPointerException(); } bitField0_ |= 0x00000001; - state_ = value; + state_ = value.getNumber(); onChanged(); return this; } @@ -7235,26 +7467,63 @@ public final class ZooKeeperProtos { */ public Builder clearState() { bitField0_ = (bitField0_ & ~0x00000001); - state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState.State.ENABLED; + state_ = 0; onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ReplicationState) } + // @@protoc_insertion_point(class_scope:hbase.pb.ReplicationState) + private static final 
org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState DEFAULT_INSTANCE; static { - defaultInstance = new ReplicationState(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ReplicationState parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ReplicationState(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationState getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ReplicationState) } - public interface ReplicationHLogPositionOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface ReplicationHLogPositionOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ReplicationHLogPosition) + com.google.protobuf.MessageOrBuilder { - // required int64 position = 1; /** * required int64 position = 1; */ @@ -7265,43 +7534,35 @@ public final class ZooKeeperProtos { long getPosition(); } /** - * Protobuf type {@code hbase.pb.ReplicationHLogPosition} - * *
    **
    * Used by replication. Holds the current position in an WAL file.
    * 
+ * + * Protobuf type {@code hbase.pb.ReplicationHLogPosition} */ - public static final class ReplicationHLogPosition extends - com.google.protobuf.GeneratedMessage - implements ReplicationHLogPositionOrBuilder { + public static final class ReplicationHLogPosition extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ReplicationHLogPosition) + ReplicationHLogPositionOrBuilder { // Use ReplicationHLogPosition.newBuilder() to construct. - private ReplicationHLogPosition(com.google.protobuf.GeneratedMessage.Builder builder) { + private ReplicationHLogPosition(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ReplicationHLogPosition(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ReplicationHLogPosition defaultInstance; - public static ReplicationHLogPosition getDefaultInstance() { - return defaultInstance; } - - public ReplicationHLogPosition getDefaultInstanceForType() { - return defaultInstance; + private ReplicationHLogPosition() { + position_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private ReplicationHLogPosition( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -7331,7 +7592,7 @@ public final class ZooKeeperProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - 
e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -7342,30 +7603,14 @@ public final class ZooKeeperProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_ReplicationHLogPosition_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_ReplicationHLogPosition_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ReplicationHLogPosition parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ReplicationHLogPosition(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // required int64 position = 1; public static final int POSITION_FIELD_NUMBER = 1; private long position_; /** @@ -7381,13 +7626,11 @@ public final class ZooKeeperProtos { return position_; } - private void initFields() { - position_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasPosition()) { memoizedIsInitialized = 0; @@ -7399,16 
+7642,14 @@ public final class ZooKeeperProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeInt64(1, position_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -7416,19 +7657,13 @@ public final class ZooKeeperProtos { size += com.google.protobuf.CodedOutputStream .computeInt64Size(1, position_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -7444,12 +7679,10 @@ public final class ZooKeeperProtos { result = result && (getPosition() == other.getPosition()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -7459,9 +7692,10 @@ public final class ZooKeeperProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasPosition()) { hash = (37 * hash) + POSITION_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getPosition()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getPosition()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -7489,66 +7723,78 @@ public final class 
ZooKeeperProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.ReplicationHLogPosition} - * *
      **
      * Used by replication. Holds the current position in an WAL file.
      * 
+ * + * Protobuf type {@code hbase.pb.ReplicationHLogPosition} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationHLogPositionOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ReplicationHLogPosition) + org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationHLogPositionOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_ReplicationHLogPosition_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_ReplicationHLogPosition_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -7561,18 +7807,15 @@ public final class ZooKeeperProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); position_ = 0L; @@ -7580,10 +7823,6 @@ public final class ZooKeeperProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_ReplicationHLogPosition_descriptor; @@ -7614,6 +7853,32 @@ public final class ZooKeeperProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition)other); @@ -7628,13 +7893,13 @@ public final class ZooKeeperProtos { if (other.hasPosition()) { setPosition(other.getPosition()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasPosition()) { - return false; } return true; @@ -7649,7 +7914,7 @@ public final class ZooKeeperProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition) 
e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -7659,7 +7924,6 @@ public final class ZooKeeperProtos { } private int bitField0_; - // required int64 position = 1; private long position_ ; /** * required int64 position = 1; @@ -7691,22 +7955,59 @@ public final class ZooKeeperProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ReplicationHLogPosition) } + // @@protoc_insertion_point(class_scope:hbase.pb.ReplicationHLogPosition) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition DEFAULT_INSTANCE; static { - defaultInstance = new ReplicationHLogPosition(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ReplicationHLogPosition parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ReplicationHLogPosition(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public 
org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.ReplicationHLogPosition) } - public interface TableLockOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface TableLockOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.TableLock) + com.google.protobuf.MessageOrBuilder { - // optional .hbase.pb.TableName table_name = 1; /** * optional .hbase.pb.TableName table_name = 1; */ @@ -7720,7 +8021,6 @@ public final class ZooKeeperProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); - // optional .hbase.pb.ServerName lock_owner = 2; /** * optional .hbase.pb.ServerName lock_owner = 2; */ @@ -7734,7 +8034,6 @@ public final class ZooKeeperProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getLockOwnerOrBuilder(); - // optional int64 thread_id = 3; /** * optional int64 thread_id = 3; */ @@ -7744,7 +8043,6 @@ public final class ZooKeeperProtos { */ long getThreadId(); - // optional bool is_shared = 4; /** * optional bool is_shared = 4; */ @@ -7754,7 +8052,6 @@ public final class ZooKeeperProtos { */ boolean getIsShared(); - // optional string purpose = 5; /** * optional string purpose = 5; */ @@ -7769,7 +8066,6 @@ public final class ZooKeeperProtos { com.google.protobuf.ByteString getPurposeBytes(); - // optional int64 create_time = 6; /** * optional int64 create_time = 6; */ @@ -7780,43 +8076,38 @@ public final class ZooKeeperProtos { long getCreateTime(); } /** - * Protobuf type {@code hbase.pb.TableLock} - * *
    **
    * Metadata associated with a table lock in zookeeper
    * 
+ * + * Protobuf type {@code hbase.pb.TableLock} */ - public static final class TableLock extends - com.google.protobuf.GeneratedMessage - implements TableLockOrBuilder { + public static final class TableLock extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.TableLock) + TableLockOrBuilder { // Use TableLock.newBuilder() to construct. - private TableLock(com.google.protobuf.GeneratedMessage.Builder builder) { + private TableLock(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); } - private TableLock(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final TableLock defaultInstance; - public static TableLock getDefaultInstance() { - return defaultInstance; - } - - public TableLock getDefaultInstanceForType() { - return defaultInstance; + private TableLock() { + threadId_ = 0L; + isShared_ = false; + purpose_ = ""; + createTime_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private TableLock( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -7872,8 +8163,9 @@ public final class ZooKeeperProtos { break; } case 42: { + com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000010; - purpose_ = input.readBytes(); + purpose_ = bs; break; } case 48: { @@ -7887,7 +8179,7 @@ public final class ZooKeeperProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -7898,30 +8190,14 @@ public final class ZooKeeperProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_TableLock_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_TableLock_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableLock.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableLock.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public TableLock parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new TableLock(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional .hbase.pb.TableName table_name = 1; public static final int TABLE_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** @@ -7934,16 +8210,15 @@ public final class ZooKeeperProtos { * optional .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { - return tableName_; + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * optional .hbase.pb.TableName table_name = 1; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { - return tableName_; + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } - // optional .hbase.pb.ServerName lock_owner = 2; public static final int LOCK_OWNER_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName lockOwner_; /** @@ -7956,16 +8231,15 @@ public final class ZooKeeperProtos { * optional .hbase.pb.ServerName lock_owner = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getLockOwner() { - return lockOwner_; + return lockOwner_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : lockOwner_; } /** * optional .hbase.pb.ServerName lock_owner = 2; */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getLockOwnerOrBuilder() { - return lockOwner_; + return lockOwner_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : lockOwner_; } - // optional int64 thread_id = 3; public static final int THREAD_ID_FIELD_NUMBER = 3; private long threadId_; /** @@ -7981,7 +8255,6 @@ public final class ZooKeeperProtos { return threadId_; } - // optional bool is_shared = 4; public static final int IS_SHARED_FIELD_NUMBER = 4; private boolean isShared_; /** @@ -7997,9 +8270,8 @@ public final class ZooKeeperProtos { return isShared_; } - // optional string purpose = 5; public static final int PURPOSE_FIELD_NUMBER = 5; - private java.lang.Object purpose_; + private volatile java.lang.Object purpose_; /** * optional string purpose = 5; */ @@ -8040,7 +8312,6 @@ public final class ZooKeeperProtos { } } - // optional int64 create_time = 6; public static final int CREATE_TIME_FIELD_NUMBER = 6; private long createTime_; /** @@ -8056,18 +8327,11 @@ public final class ZooKeeperProtos { return createTime_; } - private void initFields() { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - lockOwner_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - threadId_ = 0L; - isShared_ = false; - purpose_ = ""; - createTime_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (hasTableName()) { if (!getTableName().isInitialized()) { @@ -8087,12 +8351,11 @@ public final class ZooKeeperProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, tableName_); + output.writeMessage(1, getTableName()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - 
output.writeMessage(2, lockOwner_); + output.writeMessage(2, getLockOwner()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeInt64(3, threadId_); @@ -8101,27 +8364,26 @@ public final class ZooKeeperProtos { output.writeBool(4, isShared_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeBytes(5, getPurposeBytes()); + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, purpose_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { output.writeInt64(6, createTime_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, tableName_); + .computeMessageSize(1, getTableName()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, lockOwner_); + .computeMessageSize(2, getLockOwner()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream @@ -8132,26 +8394,19 @@ public final class ZooKeeperProtos { .computeBoolSize(4, isShared_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(5, getPurposeBytes()); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, purpose_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { size += com.google.protobuf.CodedOutputStream .computeInt64Size(6, createTime_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return 
super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -8192,12 +8447,10 @@ public final class ZooKeeperProtos { result = result && (getCreateTime() == other.getCreateTime()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -8215,11 +8468,13 @@ public final class ZooKeeperProtos { } if (hasThreadId()) { hash = (37 * hash) + THREAD_ID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getThreadId()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getThreadId()); } if (hasIsShared()) { hash = (37 * hash) + IS_SHARED_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getIsShared()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getIsShared()); } if (hasPurpose()) { hash = (37 * hash) + PURPOSE_FIELD_NUMBER; @@ -8227,9 +8482,10 @@ public final class ZooKeeperProtos { } if (hasCreateTime()) { hash = (37 * hash) + CREATE_TIME_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getCreateTime()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getCreateTime()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -8257,66 +8513,78 @@ public final class ZooKeeperProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableLock parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableLock parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableLock parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableLock parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableLock parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableLock parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableLock prototype) { - return newBuilder().mergeFrom(prototype); + return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.TableLock} - * *
      **
      * Metadata associated with a table lock in zookeeper
      * 
+ * + * Protobuf type {@code hbase.pb.TableLock} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableLockOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.TableLock) + org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableLockOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_TableLock_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_TableLock_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -8329,30 +8597,27 @@ public final class ZooKeeperProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getTableNameFieldBuilder(); getLockOwnerFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (lockOwnerBuilder_ == null) { - lockOwner_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + 
lockOwner_ = null; } else { lockOwnerBuilder_.clear(); } @@ -8368,10 +8633,6 @@ public final class ZooKeeperProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_TableLock_descriptor; @@ -8430,6 +8691,32 @@ public final class ZooKeeperProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableLock) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableLock)other); @@ -8461,20 +8748,19 @@ public final class ZooKeeperProtos { if (other.hasCreateTime()) { setCreateTime(other.getCreateTime()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (hasTableName()) { if (!getTableName().isInitialized()) { - return false; } } if (hasLockOwner()) 
{ if (!getLockOwner().isInitialized()) { - return false; } } @@ -8490,7 +8776,7 @@ public final class ZooKeeperProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableLock) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -8500,9 +8786,8 @@ public final class ZooKeeperProtos { } private int bitField0_; - // optional .hbase.pb.TableName table_name = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * optional .hbase.pb.TableName table_name = 1; @@ -8515,7 +8800,7 @@ public final class ZooKeeperProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { - return tableName_; + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } @@ -8556,6 +8841,7 @@ public final class ZooKeeperProtos { public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); @@ -8574,7 +8860,7 @@ public final class ZooKeeperProtos { */ public Builder clearTableName() { if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); @@ -8597,19 +8883,20 @@ public final class ZooKeeperProtos { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { - return tableName_; + return tableName_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * optional .hbase.pb.TableName table_name = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { - tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( - tableName_, + getTableName(), getParentForChildren(), isClean()); tableName_ = null; @@ -8617,9 +8904,8 @@ public final class ZooKeeperProtos { return tableNameBuilder_; } - // optional .hbase.pb.ServerName lock_owner = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName lockOwner_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName lockOwner_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> lockOwnerBuilder_; /** * optional .hbase.pb.ServerName lock_owner = 2; @@ -8632,7 +8918,7 @@ public final class ZooKeeperProtos { */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getLockOwner() { if (lockOwnerBuilder_ == null) { - return lockOwner_; + return lockOwner_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : lockOwner_; } else { return lockOwnerBuilder_.getMessage(); } @@ -8673,6 +8959,7 @@ public final class ZooKeeperProtos { public Builder mergeLockOwner(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (lockOwnerBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + lockOwner_ != null && lockOwner_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { lockOwner_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.newBuilder(lockOwner_).mergeFrom(value).buildPartial(); @@ -8691,7 +8978,7 @@ public final class ZooKeeperProtos { */ public Builder clearLockOwner() { if (lockOwnerBuilder_ == null) { - lockOwner_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + lockOwner_ = null; onChanged(); } else { lockOwnerBuilder_.clear(); @@ -8714,19 +9001,20 @@ public final class ZooKeeperProtos { if (lockOwnerBuilder_ != null) { return lockOwnerBuilder_.getMessageOrBuilder(); } else { - return lockOwner_; + return lockOwner_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : lockOwner_; } } /** * optional .hbase.pb.ServerName lock_owner = 2; */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getLockOwnerFieldBuilder() { if (lockOwnerBuilder_ == null) { - lockOwnerBuilder_ = new com.google.protobuf.SingleFieldBuilder< + lockOwnerBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( - lockOwner_, + getLockOwner(), getParentForChildren(), isClean()); lockOwner_ = null; @@ -8734,7 +9022,6 @@ public final class ZooKeeperProtos { return lockOwnerBuilder_; } - // optional int64 thread_id = 3; private long threadId_ ; /** * optional int64 thread_id = 3; @@ -8767,7 +9054,6 @@ public final class ZooKeeperProtos { return this; } - // optional bool is_shared = 4; private boolean isShared_ ; /** * optional bool is_shared = 4; @@ -8800,7 +9086,6 @@ public final class ZooKeeperProtos { return this; } - // optional string purpose = 5; private java.lang.Object purpose_ = ""; /** * optional string purpose = 5; @@ -8814,9 +9099,12 @@ public final class ZooKeeperProtos { public java.lang.String getPurpose() { java.lang.Object ref = purpose_; if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - purpose_ = s; + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + purpose_ = 
s; + } return s; } else { return (java.lang.String) ref; @@ -8874,7 +9162,6 @@ public final class ZooKeeperProtos { return this; } - // optional int64 create_time = 6; private long createTime_ ; /** * optional int64 create_time = 6; @@ -8906,22 +9193,59 @@ public final class ZooKeeperProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.TableLock) } + // @@protoc_insertion_point(class_scope:hbase.pb.TableLock) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableLock DEFAULT_INSTANCE; static { - defaultInstance = new TableLock(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableLock(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableLock getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public TableLock parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TableLock(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.TableLock getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.TableLock) 
} - public interface SwitchStateOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface SwitchStateOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SwitchState) + com.google.protobuf.MessageOrBuilder { - // optional bool enabled = 1; /** * optional bool enabled = 1; */ @@ -8932,43 +9256,35 @@ public final class ZooKeeperProtos { boolean getEnabled(); } /** - * Protobuf type {@code hbase.pb.SwitchState} - * *
    **
    * State of the switch.
    * 
+ * + * Protobuf type {@code hbase.pb.SwitchState} */ - public static final class SwitchState extends - com.google.protobuf.GeneratedMessage - implements SwitchStateOrBuilder { + public static final class SwitchState extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.SwitchState) + SwitchStateOrBuilder { // Use SwitchState.newBuilder() to construct. - private SwitchState(com.google.protobuf.GeneratedMessage.Builder builder) { + private SwitchState(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private SwitchState(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SwitchState defaultInstance; - public static SwitchState getDefaultInstance() { - return defaultInstance; } - - public SwitchState getDefaultInstanceForType() { - return defaultInstance; + private SwitchState() { + enabled_ = false; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private SwitchState( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -8998,7 +9314,7 @@ public final class ZooKeeperProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -9009,30 +9325,14 @@ public final class 
ZooKeeperProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SwitchState_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SwitchState_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SwitchState parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SwitchState(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - private int bitField0_; - // optional bool enabled = 1; public static final int ENABLED_FIELD_NUMBER = 1; private boolean enabled_; /** @@ -9048,13 +9348,11 @@ public final class ZooKeeperProtos { return enabled_; } - private void initFields() { - enabled_ = false; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; @@ -9062,16 +9360,14 @@ public final class ZooKeeperProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, enabled_); } - getUnknownFields().writeTo(output); + 
unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; @@ -9079,19 +9375,13 @@ public final class ZooKeeperProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, enabled_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -9107,12 +9397,10 @@ public final class ZooKeeperProtos { result = result && (getEnabled() == other.getEnabled()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -9122,9 +9410,10 @@ public final class ZooKeeperProtos { hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasEnabled()) { hash = (37 * hash) + ENABLED_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getEnabled()); + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getEnabled()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -9152,66 +9441,78 @@ public final class ZooKeeperProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); 
} public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return 
DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** - * Protobuf type {@code hbase.pb.SwitchState} - * *
      **
      * State of the switch.
      * 
+ * + * Protobuf type {@code hbase.pb.SwitchState} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchStateOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SwitchState) + org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchStateOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SwitchState_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SwitchState_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -9224,18 +9525,15 @@ public final class ZooKeeperProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); enabled_ = false; @@ -9243,10 +9541,6 @@ public final class ZooKeeperProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SwitchState_descriptor; @@ -9277,6 +9571,32 @@ public final class ZooKeeperProtos { 
return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState)other); @@ -9291,7 +9611,8 @@ public final class ZooKeeperProtos { if (other.hasEnabled()) { setEnabled(other.getEnabled()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } @@ -9308,7 +9629,7 @@ public final class ZooKeeperProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -9318,7 +9639,6 @@ public final class ZooKeeperProtos { } private int bitField0_; - // optional bool enabled = 1; private boolean enabled_ ; /** * optional bool enabled = 1; @@ -9350,79 
+9670,116 @@ public final class ZooKeeperProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.SwitchState) } + // @@protoc_insertion_point(class_scope:hbase.pb.SwitchState) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState DEFAULT_INSTANCE; static { - defaultInstance = new SwitchState(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public SwitchState parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SwitchState(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.SwitchState) } - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_MetaRegionServer_descriptor; - private static - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_MetaRegionServer_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_Master_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_Master_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ClusterUp_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ClusterUp_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SplitLogTask_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SplitLogTask_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_DeprecatedTableState_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_DeprecatedTableState_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_TableCF_descriptor; - private static - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_TableCF_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ReplicationPeer_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ReplicationPeer_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ReplicationState_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ReplicationState_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ReplicationHLogPosition_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ReplicationHLogPosition_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_TableLock_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_TableLock_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static final com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SwitchState_descriptor; - private static - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SwitchState_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { @@ -9465,85 +9822,87 @@ public final class ZooKeeperProtos { "buf.generatedB\017ZooKeeperProtosH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_hbase_pb_MetaRegionServer_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_hbase_pb_MetaRegionServer_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_MetaRegionServer_descriptor, - new java.lang.String[] { "Server", "RpcVersion", "State", }); - internal_static_hbase_pb_Master_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_hbase_pb_Master_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_Master_descriptor, - new java.lang.String[] { "Master", "RpcVersion", "InfoPort", }); - internal_static_hbase_pb_ClusterUp_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_hbase_pb_ClusterUp_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ClusterUp_descriptor, - new java.lang.String[] { "StartDate", }); - internal_static_hbase_pb_SplitLogTask_descriptor = - 
getDescriptor().getMessageTypes().get(3); - internal_static_hbase_pb_SplitLogTask_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_SplitLogTask_descriptor, - new java.lang.String[] { "State", "ServerName", "Mode", }); - internal_static_hbase_pb_DeprecatedTableState_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_hbase_pb_DeprecatedTableState_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_DeprecatedTableState_descriptor, - new java.lang.String[] { "State", }); - internal_static_hbase_pb_TableCF_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_hbase_pb_TableCF_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_TableCF_descriptor, - new java.lang.String[] { "TableName", "Families", }); - internal_static_hbase_pb_ReplicationPeer_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_hbase_pb_ReplicationPeer_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ReplicationPeer_descriptor, - new java.lang.String[] { "Clusterkey", "ReplicationEndpointImpl", "Data", "Configuration", "TableCfs", "Namespaces", }); - internal_static_hbase_pb_ReplicationState_descriptor = - getDescriptor().getMessageTypes().get(7); - internal_static_hbase_pb_ReplicationState_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ReplicationState_descriptor, - new java.lang.String[] { "State", }); - internal_static_hbase_pb_ReplicationHLogPosition_descriptor = - getDescriptor().getMessageTypes().get(8); - internal_static_hbase_pb_ReplicationHLogPosition_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_ReplicationHLogPosition_descriptor, - new java.lang.String[] { 
"Position", }); - internal_static_hbase_pb_TableLock_descriptor = - getDescriptor().getMessageTypes().get(9); - internal_static_hbase_pb_TableLock_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_TableLock_descriptor, - new java.lang.String[] { "TableName", "LockOwner", "ThreadId", "IsShared", "Purpose", "CreateTime", }); - internal_static_hbase_pb_SwitchState_descriptor = - getDescriptor().getMessageTypes().get(10); - internal_static_hbase_pb_SwitchState_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_SwitchState_descriptor, - new java.lang.String[] { "Enabled", }); - return null; - } - }; + new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(), org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.getDescriptor(), }, assigner); + internal_static_hbase_pb_MetaRegionServer_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_hbase_pb_MetaRegionServer_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_MetaRegionServer_descriptor, + new java.lang.String[] { "Server", "RpcVersion", "State", }); + internal_static_hbase_pb_Master_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_hbase_pb_Master_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_Master_descriptor, + new java.lang.String[] { "Master", "RpcVersion", "InfoPort", }); + 
internal_static_hbase_pb_ClusterUp_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_hbase_pb_ClusterUp_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ClusterUp_descriptor, + new java.lang.String[] { "StartDate", }); + internal_static_hbase_pb_SplitLogTask_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_hbase_pb_SplitLogTask_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_SplitLogTask_descriptor, + new java.lang.String[] { "State", "ServerName", "Mode", }); + internal_static_hbase_pb_DeprecatedTableState_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_hbase_pb_DeprecatedTableState_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_DeprecatedTableState_descriptor, + new java.lang.String[] { "State", }); + internal_static_hbase_pb_TableCF_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_hbase_pb_TableCF_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_TableCF_descriptor, + new java.lang.String[] { "TableName", "Families", }); + internal_static_hbase_pb_ReplicationPeer_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_hbase_pb_ReplicationPeer_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ReplicationPeer_descriptor, + new java.lang.String[] { "Clusterkey", "ReplicationEndpointImpl", "Data", "Configuration", "TableCfs", "Namespaces", }); + internal_static_hbase_pb_ReplicationState_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_hbase_pb_ReplicationState_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ReplicationState_descriptor, + new 
java.lang.String[] { "State", }); + internal_static_hbase_pb_ReplicationHLogPosition_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_hbase_pb_ReplicationHLogPosition_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_ReplicationHLogPosition_descriptor, + new java.lang.String[] { "Position", }); + internal_static_hbase_pb_TableLock_descriptor = + getDescriptor().getMessageTypes().get(9); + internal_static_hbase_pb_TableLock_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_TableLock_descriptor, + new java.lang.String[] { "TableName", "LockOwner", "ThreadId", "IsShared", "Purpose", "CreateTime", }); + internal_static_hbase_pb_SwitchState_descriptor = + getDescriptor().getMessageTypes().get(10); + internal_static_hbase_pb_SwitchState_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_SwitchState_descriptor, + new java.lang.String[] { "Enabled", }); + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(); + org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/util/ByteStringer.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/util/ByteStringer.java index 4ccf154..38cc0bd 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/util/ByteStringer.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/util/ByteStringer.java @@ -17,37 +17,17 @@ */ package org.apache.hadoop.hbase.shaded.util; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; import com.google.protobuf.ByteString; -import 
com.google.protobuf.HBaseZeroCopyByteString; +import com.google.protobuf.UnsafeByteOperations; /** * Hack to workaround HBASE-10304 issue that keeps bubbling up when a mapreduce context. */ +// Depends on protobuf-3.1.0 feature. @InterfaceAudience.Private public class ByteStringer { - private static final Log LOG = LogFactory.getLog(ByteStringer.class); - - /** - * Flag set at class loading time. - */ - private static boolean USE_ZEROCOPYBYTESTRING = true; - - // Can I classload HBaseZeroCopyByteString without IllegalAccessError? - // If we can, use it passing ByteStrings to pb else use native ByteString though more costly - // because it makes a copy of the passed in array. - static { - try { - HBaseZeroCopyByteString.wrap(new byte [0]); - } catch (IllegalAccessError iae) { - USE_ZEROCOPYBYTESTRING = false; - LOG.debug("Failed to classload HBaseZeroCopyByteString: " + iae.toString()); - } - } - private ByteStringer() { super(); } @@ -56,14 +36,13 @@ public class ByteStringer { * Wraps a byte array in a {@link ByteString} without copying it. */ public static ByteString wrap(final byte[] array) { - return USE_ZEROCOPYBYTESTRING? HBaseZeroCopyByteString.wrap(array): ByteString.copyFrom(array); + return UnsafeByteOperations.unsafeWrap(array); } /** * Wraps a subset of a byte array in a {@link ByteString} without copying it. */ public static ByteString wrap(final byte[] array, int offset, int length) { - return USE_ZEROCOPYBYTESTRING? HBaseZeroCopyByteString.wrap(array, offset, length): - ByteString.copyFrom(array, offset, length); + return UnsafeByteOperations.unsafeWrap(array, offset, length); } } -- 2.6.1